// onnx.OnnxMl — Maven / Gradle / Ivy artifact listing header (non-source residue from the hosting page).
// "The newest version!" — artifact-site banner text; not part of the generated file.
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: onnx-ml.proto
package onnx;
public final class OnnxMl {
// Private constructor: OnnxMl is a non-instantiable holder class for the
// generated ONNX-ML protobuf messages and enums.
private OnnxMl() {}
/**
 * Registers all extensions defined in this file with a lite registry.
 * This proto file declares no extensions, so the body is intentionally empty;
 * the method exists because the protobuf compiler always emits it.
 */
public static void registerAllExtensions(
org.nd4j.shade.protobuf.ExtensionRegistryLite registry) {
}
/**
 * Registers all extensions with a full registry by delegating to the
 * lite-registry overload (ExtensionRegistry extends ExtensionRegistryLite).
 */
public static void registerAllExtensions(
org.nd4j.shade.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(org.nd4j.shade.protobuf.ExtensionRegistryLite) registry);
}
/**
 *
 * Versioning
 * ONNX versioning is specified in docs/IR.md and elaborated on in docs/Versioning.md
 * To be compatible with both proto2 and proto3, we will use a version number
 * that is not defined by the default value but an explicit enum number.
 *
 *
 * Protobuf enum {@code onnx.Version}
 */
public enum Version
implements org.nd4j.shade.protobuf.ProtocolMessageEnum {
/**
 *
 * proto3 requires the first enum value to be zero.
 * We add this just to appease the compiler.
 *
 *
 * _START_VERSION = 0;
 */
_START_VERSION(0),
/**
 *
 * The version field is always serialized and we will use it to store the
 * version that the graph is generated from. This helps us set up version
 * control.
 * For the IR, we are using simple numbers starting with 0x00000001,
 * which was the version we published on Oct 10, 2017.
 *
 *
 * IR_VERSION_2017_10_10 = 1;
 */
IR_VERSION_2017_10_10(1),
/**
 *
 * IR_VERSION 2 published on Oct 30, 2017
 * - Added type discriminator to AttributeProto to support proto3 users
 *
 *
 * IR_VERSION_2017_10_30 = 2;
 */
IR_VERSION_2017_10_30(2),
/**
 *
 * IR VERSION 3 published on Nov 3, 2017
 * - For operator versioning:
 * - Added new message OperatorSetIdProto
 * - Added opset_import in ModelProto
 * - For vendor extensions, added domain in NodeProto
 *
 *
 * IR_VERSION_2017_11_3 = 3;
 */
IR_VERSION_2017_11_3(3),
/**
 *
 * IR VERSION 4 published on Jan 22, 2019
 * - Relax constraint that initializers should be a subset of graph inputs
 * - Add type BFLOAT16
 *
 *
 * IR_VERSION_2019_1_22 = 4;
 */
IR_VERSION_2019_1_22(4),
/**
 *
 * IR VERSION 5 published on March 18, 2019
 * - Add message TensorAnnotation.
 * - Add quantization annotation in GraphProto to map tensor with its scale and zero point quantization parameters.
 *
 *
 * IR_VERSION_2019_3_18 = 5;
 */
IR_VERSION_2019_3_18(5),
/**
 *
 * IR VERSION 6 published on Sep 19, 2019
 * - Add support for sparse tensor constants stored in model.
 * - Add message SparseTensorProto
 * - Add sparse initializers
 *
 *
 * IR_VERSION_2019_9_19 = 6;
 */
IR_VERSION_2019_9_19(6),
/**
 *
 * IR VERSION 7 published on May 8, 2020
 * - Add support to allow function body graph to rely on multiple external operator sets.
 * - Add a list to promote inference graph's initializers to global and
 * mutable variables. Global variables are visible in all graphs of the
 * stored models.
 * - Add message TrainingInfoProto to store initialization
 * method and training algorithm. The execution of TrainingInfoProto
 * can modify the values of mutable variables.
 * - Implicitly add inference graph into each TrainingInfoProto's algorithm.
 *
 *
 * IR_VERSION_2020_5_8 = 7;
 */
IR_VERSION_2020_5_8(7),
/**
 *
 * IR VERSION 8 published on <TBD>
 * Introduce TypeProto.SparseTensor
 * Introduce TypeProto.Optional
 * Added a list of FunctionProtos local to the model
 * Deprecated since_version and operator status from FunctionProto
 *
 *
 * IR_VERSION = 8;
 */
IR_VERSION(8),
// Sentinel for wire values that have no matching constant (proto3 open
// enums); getNumber() and getValueDescriptor() throw for this constant.
UNRECOGNIZED(-1),
;
/**
 *
 * proto3 requires the first enum value to be zero.
 * We add this just to appease the compiler.
 *
 *
 * _START_VERSION = 0;
 */
public static final int _START_VERSION_VALUE = 0;
/**
 *
 * The version field is always serialized and we will use it to store the
 * version that the graph is generated from. This helps us set up version
 * control.
 * For the IR, we are using simple numbers starting with 0x00000001,
 * which was the version we published on Oct 10, 2017.
 *
 *
 * IR_VERSION_2017_10_10 = 1;
 */
public static final int IR_VERSION_2017_10_10_VALUE = 1;
/**
 *
 * IR_VERSION 2 published on Oct 30, 2017
 * - Added type discriminator to AttributeProto to support proto3 users
 *
 *
 * IR_VERSION_2017_10_30 = 2;
 */
public static final int IR_VERSION_2017_10_30_VALUE = 2;
/**
 *
 * IR VERSION 3 published on Nov 3, 2017
 * - For operator versioning:
 * - Added new message OperatorSetIdProto
 * - Added opset_import in ModelProto
 * - For vendor extensions, added domain in NodeProto
 *
 *
 * IR_VERSION_2017_11_3 = 3;
 */
public static final int IR_VERSION_2017_11_3_VALUE = 3;
/**
 *
 * IR VERSION 4 published on Jan 22, 2019
 * - Relax constraint that initializers should be a subset of graph inputs
 * - Add type BFLOAT16
 *
 *
 * IR_VERSION_2019_1_22 = 4;
 */
public static final int IR_VERSION_2019_1_22_VALUE = 4;
/**
 *
 * IR VERSION 5 published on March 18, 2019
 * - Add message TensorAnnotation.
 * - Add quantization annotation in GraphProto to map tensor with its scale and zero point quantization parameters.
 *
 *
 * IR_VERSION_2019_3_18 = 5;
 */
public static final int IR_VERSION_2019_3_18_VALUE = 5;
/**
 *
 * IR VERSION 6 published on Sep 19, 2019
 * - Add support for sparse tensor constants stored in model.
 * - Add message SparseTensorProto
 * - Add sparse initializers
 *
 *
 * IR_VERSION_2019_9_19 = 6;
 */
public static final int IR_VERSION_2019_9_19_VALUE = 6;
/**
 *
 * IR VERSION 7 published on May 8, 2020
 * - Add support to allow function body graph to rely on multiple external operator sets.
 * - Add a list to promote inference graph's initializers to global and
 * mutable variables. Global variables are visible in all graphs of the
 * stored models.
 * - Add message TrainingInfoProto to store initialization
 * method and training algorithm. The execution of TrainingInfoProto
 * can modify the values of mutable variables.
 * - Implicitly add inference graph into each TrainingInfoProto's algorithm.
 *
 *
 * IR_VERSION_2020_5_8 = 7;
 */
public static final int IR_VERSION_2020_5_8_VALUE = 7;
/**
 *
 * IR VERSION 8 published on <TBD>
 * Introduce TypeProto.SparseTensor
 * Introduce TypeProto.Optional
 * Added a list of FunctionProtos local to the model
 * Deprecated since_version and operator status from FunctionProto
 *
 *
 * IR_VERSION = 8;
 */
public static final int IR_VERSION_VALUE = 8;
/**
 * Returns the numeric wire value of this enum constant.
 * @throws java.lang.IllegalArgumentException if this is {@code UNRECOGNIZED},
 *         which has no defined wire value.
 */
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value.
 * @deprecated Use {@link #forNumber(int)} instead.
 */
@java.lang.Deprecated
public static Version valueOf(int value) {
return forNumber(value);
}
/**
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value, or
 *         {@code null} if no constant matches.
 */
public static Version forNumber(int value) {
switch (value) {
case 0: return _START_VERSION;
case 1: return IR_VERSION_2017_10_10;
case 2: return IR_VERSION_2017_10_30;
case 3: return IR_VERSION_2017_11_3;
case 4: return IR_VERSION_2019_1_22;
case 5: return IR_VERSION_2019_3_18;
case 6: return IR_VERSION_2019_9_19;
case 7: return IR_VERSION_2020_5_8;
case 8: return IR_VERSION;
default: return null;
}
}
// Wire-number-to-constant mapping used by the protobuf runtime.
public static org.nd4j.shade.protobuf.Internal.EnumLiteMap<Version>
internalGetValueMap() {
return internalValueMap;
}
private static final org.nd4j.shade.protobuf.Internal.EnumLiteMap<
Version> internalValueMap =
new org.nd4j.shade.protobuf.Internal.EnumLiteMap<Version>() {
public Version findValueByNumber(int number) {
return Version.forNumber(number);
}
};
// Descriptor-based reflection support (requires the full, non-lite runtime).
// Value descriptors are stored in declaration order, so ordinal() is a safe index.
public final org.nd4j.shade.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final org.nd4j.shade.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final org.nd4j.shade.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
// onnx.Version is the first enum declared in onnx-ml.proto.
return onnx.OnnxMl.getDescriptor().getEnumTypes().get(0);
}
private static final Version[] VALUES = values();
/** Looks up the constant for a value descriptor obtained via reflection. */
public static Version valueOf(
org.nd4j.shade.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
// The proto wire number of this constant (-1 for UNRECOGNIZED).
private final int value;
private Version(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:onnx.Version)
}
/**
 *
 * Operator/function status.
 *
 *
 * Protobuf enum {@code onnx.OperatorStatus}
 */
public enum OperatorStatus
implements org.nd4j.shade.protobuf.ProtocolMessageEnum {
/**
 * EXPERIMENTAL = 0;
 */
EXPERIMENTAL(0),
/**
 * STABLE = 1;
 */
STABLE(1),
// Sentinel for wire values that have no matching constant (proto3 open
// enums); getNumber() and getValueDescriptor() throw for this constant.
UNRECOGNIZED(-1),
;
/**
 * EXPERIMENTAL = 0;
 */
public static final int EXPERIMENTAL_VALUE = 0;
/**
 * STABLE = 1;
 */
public static final int STABLE_VALUE = 1;
/**
 * Returns the numeric wire value of this enum constant.
 * @throws java.lang.IllegalArgumentException if this is {@code UNRECOGNIZED},
 *         which has no defined wire value.
 */
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value.
 * @deprecated Use {@link #forNumber(int)} instead.
 */
@java.lang.Deprecated
public static OperatorStatus valueOf(int value) {
return forNumber(value);
}
/**
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value, or
 *         {@code null} if no constant matches.
 */
public static OperatorStatus forNumber(int value) {
switch (value) {
case 0: return EXPERIMENTAL;
case 1: return STABLE;
default: return null;
}
}
// Wire-number-to-constant mapping used by the protobuf runtime.
public static org.nd4j.shade.protobuf.Internal.EnumLiteMap<OperatorStatus>
internalGetValueMap() {
return internalValueMap;
}
private static final org.nd4j.shade.protobuf.Internal.EnumLiteMap<
OperatorStatus> internalValueMap =
new org.nd4j.shade.protobuf.Internal.EnumLiteMap<OperatorStatus>() {
public OperatorStatus findValueByNumber(int number) {
return OperatorStatus.forNumber(number);
}
};
// Descriptor-based reflection support (requires the full, non-lite runtime).
// Value descriptors are stored in declaration order, so ordinal() is a safe index.
public final org.nd4j.shade.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final org.nd4j.shade.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final org.nd4j.shade.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
// onnx.OperatorStatus is the second enum declared in onnx-ml.proto.
return onnx.OnnxMl.getDescriptor().getEnumTypes().get(1);
}
private static final OperatorStatus[] VALUES = values();
/** Looks up the constant for a value descriptor obtained via reflection. */
public static OperatorStatus valueOf(
org.nd4j.shade.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
// The proto wire number of this constant (-1 for UNRECOGNIZED).
private final int value;
private OperatorStatus(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:onnx.OperatorStatus)
}
/**
 * Read-side accessor interface for {@code onnx.AttributeProto}, implemented by
 * both the immutable message and its Builder. Generated by the protocol buffer
 * compiler from onnx-ml.proto; generic type arguments on the repeated-field
 * accessors restore the standard protoc signatures (they were stripped during
 * extraction, leaving raw types).
 */
public interface AttributeProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:onnx.AttributeProto)
org.nd4j.shade.protobuf.MessageOrBuilder {
/**
 *
 * The name field MUST be present for this version of the IR.
 *
 *
 * string name = 1;
 * @return The name.
 */
java.lang.String getName();
/**
 *
 * The name field MUST be present for this version of the IR.
 *
 *
 * string name = 1;
 * @return The bytes for name.
 */
org.nd4j.shade.protobuf.ByteString
getNameBytes();
/**
 *
 * if ref_attr_name is not empty, ref_attr_name is the attribute name in parent function.
 * In this case, this AttributeProto does not contain data, and it's a reference of attribute
 * in parent scope.
 * NOTE: This should ONLY be used in function (sub-graph). It's invalid to be used in main graph.
 *
 *
 * string ref_attr_name = 21;
 * @return The refAttrName.
 */
java.lang.String getRefAttrName();
/**
 *
 * if ref_attr_name is not empty, ref_attr_name is the attribute name in parent function.
 * In this case, this AttributeProto does not contain data, and it's a reference of attribute
 * in parent scope.
 * NOTE: This should ONLY be used in function (sub-graph). It's invalid to be used in main graph.
 *
 *
 * string ref_attr_name = 21;
 * @return The bytes for refAttrName.
 */
org.nd4j.shade.protobuf.ByteString
getRefAttrNameBytes();
/**
 *
 * A human-readable documentation for this attribute. Markdown is allowed.
 *
 *
 * string doc_string = 13;
 * @return The docString.
 */
java.lang.String getDocString();
/**
 *
 * A human-readable documentation for this attribute. Markdown is allowed.
 *
 *
 * string doc_string = 13;
 * @return The bytes for docString.
 */
org.nd4j.shade.protobuf.ByteString
getDocStringBytes();
/**
 *
 * The type field MUST be present for this version of the IR.
 * For 0.0.1 versions of the IR, this field was not defined, and
 * implementations needed to use has_field heuristics to determine
 * which value field was in use. For IR_VERSION 0.0.2 or later, this
 * field MUST be set and match the f|i|s|t|... field in use. This
 * change was made to accommodate proto3 implementations.
 *
 *
 * .onnx.AttributeProto.AttributeType type = 20;
 * @return The enum numeric value on the wire for type.
 */
int getTypeValue();
/**
 *
 * The type field MUST be present for this version of the IR.
 * For 0.0.1 versions of the IR, this field was not defined, and
 * implementations needed to use has_field heuristics to determine
 * which value field was in use. For IR_VERSION 0.0.2 or later, this
 * field MUST be set and match the f|i|s|t|... field in use. This
 * change was made to accommodate proto3 implementations.
 *
 *
 * .onnx.AttributeProto.AttributeType type = 20;
 * @return The type.
 */
onnx.OnnxMl.AttributeProto.AttributeType getType();
/**
 *
 * Exactly ONE of the following fields must be present for this version of the IR
 *
 *
 * float f = 2;
 * @return The f.
 */
float getF();
/**
 *
 * int
 *
 *
 * int64 i = 3;
 * @return The i.
 */
long getI();
/**
 *
 * UTF-8 string
 *
 *
 * bytes s = 4;
 * @return The s.
 */
org.nd4j.shade.protobuf.ByteString getS();
/**
 *
 * tensor value
 *
 *
 * .onnx.TensorProto t = 5;
 * @return Whether the t field is set.
 */
boolean hasT();
/**
 *
 * tensor value
 *
 *
 * .onnx.TensorProto t = 5;
 * @return The t.
 */
onnx.OnnxMl.TensorProto getT();
/**
 *
 * tensor value
 *
 *
 * .onnx.TensorProto t = 5;
 */
onnx.OnnxMl.TensorProtoOrBuilder getTOrBuilder();
/**
 *
 * graph
 *
 *
 * .onnx.GraphProto g = 6;
 * @return Whether the g field is set.
 */
boolean hasG();
/**
 *
 * graph
 *
 *
 * .onnx.GraphProto g = 6;
 * @return The g.
 */
onnx.OnnxMl.GraphProto getG();
/**
 *
 * graph
 *
 *
 * .onnx.GraphProto g = 6;
 */
onnx.OnnxMl.GraphProtoOrBuilder getGOrBuilder();
/**
 *
 * sparse tensor value
 *
 *
 * .onnx.SparseTensorProto sparse_tensor = 22;
 * @return Whether the sparseTensor field is set.
 */
boolean hasSparseTensor();
/**
 *
 * sparse tensor value
 *
 *
 * .onnx.SparseTensorProto sparse_tensor = 22;
 * @return The sparseTensor.
 */
onnx.OnnxMl.SparseTensorProto getSparseTensor();
/**
 *
 * sparse tensor value
 *
 *
 * .onnx.SparseTensorProto sparse_tensor = 22;
 */
onnx.OnnxMl.SparseTensorProtoOrBuilder getSparseTensorOrBuilder();
/**
 *
 * Do not use field below, it's deprecated.
 * optional ValueProto v = 12; // value - subsumes everything but graph
 *
 *
 * .onnx.TypeProto tp = 14;
 * @return Whether the tp field is set.
 */
boolean hasTp();
/**
 *
 * Do not use field below, it's deprecated.
 * optional ValueProto v = 12; // value - subsumes everything but graph
 *
 *
 * .onnx.TypeProto tp = 14;
 * @return The tp.
 */
onnx.OnnxMl.TypeProto getTp();
/**
 *
 * Do not use field below, it's deprecated.
 * optional ValueProto v = 12; // value - subsumes everything but graph
 *
 *
 * .onnx.TypeProto tp = 14;
 */
onnx.OnnxMl.TypeProtoOrBuilder getTpOrBuilder();
/**
 *
 * list of floats
 *
 *
 * repeated float floats = 7;
 * @return A list containing the floats.
 */
java.util.List<java.lang.Float> getFloatsList();
/**
 *
 * list of floats
 *
 *
 * repeated float floats = 7;
 * @return The count of floats.
 */
int getFloatsCount();
/**
 *
 * list of floats
 *
 *
 * repeated float floats = 7;
 * @param index The index of the element to return.
 * @return The floats at the given index.
 */
float getFloats(int index);
/**
 *
 * list of ints
 *
 *
 * repeated int64 ints = 8;
 * @return A list containing the ints.
 */
java.util.List<java.lang.Long> getIntsList();
/**
 *
 * list of ints
 *
 *
 * repeated int64 ints = 8;
 * @return The count of ints.
 */
int getIntsCount();
/**
 *
 * list of ints
 *
 *
 * repeated int64 ints = 8;
 * @param index The index of the element to return.
 * @return The ints at the given index.
 */
long getInts(int index);
/**
 *
 * list of UTF-8 strings
 *
 *
 * repeated bytes strings = 9;
 * @return A list containing the strings.
 */
java.util.List<org.nd4j.shade.protobuf.ByteString> getStringsList();
/**
 *
 * list of UTF-8 strings
 *
 *
 * repeated bytes strings = 9;
 * @return The count of strings.
 */
int getStringsCount();
/**
 *
 * list of UTF-8 strings
 *
 *
 * repeated bytes strings = 9;
 * @param index The index of the element to return.
 * @return The strings at the given index.
 */
org.nd4j.shade.protobuf.ByteString getStrings(int index);
/**
 *
 * list of tensors
 *
 *
 * repeated .onnx.TensorProto tensors = 10;
 */
java.util.List<onnx.OnnxMl.TensorProto>
getTensorsList();
/**
 *
 * list of tensors
 *
 *
 * repeated .onnx.TensorProto tensors = 10;
 */
onnx.OnnxMl.TensorProto getTensors(int index);
/**
 *
 * list of tensors
 *
 *
 * repeated .onnx.TensorProto tensors = 10;
 */
int getTensorsCount();
/**
 *
 * list of tensors
 *
 *
 * repeated .onnx.TensorProto tensors = 10;
 */
java.util.List<? extends onnx.OnnxMl.TensorProtoOrBuilder>
getTensorsOrBuilderList();
/**
 *
 * list of tensors
 *
 *
 * repeated .onnx.TensorProto tensors = 10;
 */
onnx.OnnxMl.TensorProtoOrBuilder getTensorsOrBuilder(
int index);
/**
 *
 * list of graph
 *
 *
 * repeated .onnx.GraphProto graphs = 11;
 */
java.util.List<onnx.OnnxMl.GraphProto>
getGraphsList();
/**
 *
 * list of graph
 *
 *
 * repeated .onnx.GraphProto graphs = 11;
 */
onnx.OnnxMl.GraphProto getGraphs(int index);
/**
 *
 * list of graph
 *
 *
 * repeated .onnx.GraphProto graphs = 11;
 */
int getGraphsCount();
/**
 *
 * list of graph
 *
 *
 * repeated .onnx.GraphProto graphs = 11;
 */
java.util.List<? extends onnx.OnnxMl.GraphProtoOrBuilder>
getGraphsOrBuilderList();
/**
 *
 * list of graph
 *
 *
 * repeated .onnx.GraphProto graphs = 11;
 */
onnx.OnnxMl.GraphProtoOrBuilder getGraphsOrBuilder(
int index);
/**
 *
 * list of sparse tensors
 *
 *
 * repeated .onnx.SparseTensorProto sparse_tensors = 23;
 */
java.util.List<onnx.OnnxMl.SparseTensorProto>
getSparseTensorsList();
/**
 *
 * list of sparse tensors
 *
 *
 * repeated .onnx.SparseTensorProto sparse_tensors = 23;
 */
onnx.OnnxMl.SparseTensorProto getSparseTensors(int index);
/**
 *
 * list of sparse tensors
 *
 *
 * repeated .onnx.SparseTensorProto sparse_tensors = 23;
 */
int getSparseTensorsCount();
/**
 *
 * list of sparse tensors
 *
 *
 * repeated .onnx.SparseTensorProto sparse_tensors = 23;
 */
java.util.List<? extends onnx.OnnxMl.SparseTensorProtoOrBuilder>
getSparseTensorsOrBuilderList();
/**
 *
 * list of sparse tensors
 *
 *
 * repeated .onnx.SparseTensorProto sparse_tensors = 23;
 */
onnx.OnnxMl.SparseTensorProtoOrBuilder getSparseTensorsOrBuilder(
int index);
/**
 *
 * list of type protos
 *
 *
 * repeated .onnx.TypeProto type_protos = 15;
 */
java.util.List<onnx.OnnxMl.TypeProto>
getTypeProtosList();
/**
 *
 * list of type protos
 *
 *
 * repeated .onnx.TypeProto type_protos = 15;
 */
onnx.OnnxMl.TypeProto getTypeProtos(int index);
/**
 *
 * list of type protos
 *
 *
 * repeated .onnx.TypeProto type_protos = 15;
 */
int getTypeProtosCount();
/**
 *
 * list of type protos
 *
 *
 * repeated .onnx.TypeProto type_protos = 15;
 */
java.util.List<? extends onnx.OnnxMl.TypeProtoOrBuilder>
getTypeProtosOrBuilderList();
/**
 *
 * list of type protos
 *
 *
 * repeated .onnx.TypeProto type_protos = 15;
 */
onnx.OnnxMl.TypeProtoOrBuilder getTypeProtosOrBuilder(
int index);
}
/**
*
* Attributes
* A named attribute containing either singular float, integer, string, graph,
* and tensor values, or repeated float, integer, string, graph, and tensor values.
* An AttributeProto MUST contain the name field, and *only one* of the
* following content fields, effectively enforcing a C/C++ union equivalent.
*
*
* Protobuf type {@code onnx.AttributeProto}
*/
public static final class AttributeProto extends
org.nd4j.shade.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:onnx.AttributeProto)
AttributeProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use AttributeProto.newBuilder() to construct.
private AttributeProto(org.nd4j.shade.protobuf.GeneratedMessageV3.Builder builder) {
super(builder);
}
// Default instance: every scalar at its proto3 default, every repeated
// field an immutable empty list.
private AttributeProto() {
name_ = "";
refAttrName_ = "";
docString_ = "";
type_ = 0;
s_ = org.nd4j.shade.protobuf.ByteString.EMPTY;
floats_ = emptyFloatList();
ints_ = emptyLongList();
strings_ = java.util.Collections.emptyList();
tensors_ = java.util.Collections.emptyList();
graphs_ = java.util.Collections.emptyList();
sparseTensors_ = java.util.Collections.emptyList();
typeProtos_ = java.util.Collections.emptyList();
}
// Runtime hook used by the protobuf reflection machinery to create
// fresh instances without going through a Builder.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new AttributeProto();
}
// Fields that arrived on the wire with unknown tag numbers are preserved
// here so they can be re-serialized losslessly.
@java.lang.Override
public final org.nd4j.shade.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor. Reads tag/value pairs until EOF (tag 0)
// or an unparseable field. Tag = (field_number << 3) | wire_type, so e.g.
// tag 10 is field 1 (name, length-delimited) and tag 21 is field 2 (f, fixed32).
// Repeated fields are accumulated in mutable lists guarded by bits in
// mutable_bitField0_ and sealed in the finally block.
private AttributeProto(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.nd4j.shade.protobuf.UnknownFieldSet.Builder unknownFields =
org.nd4j.shade.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 signals end of input.
done = true;
break;
case 10: {
// string name = 1
java.lang.String s = input.readStringRequireUtf8();
name_ = s;
break;
}
case 21: {
// float f = 2 (fixed32)
f_ = input.readFloat();
break;
}
case 24: {
// int64 i = 3 (varint)
i_ = input.readInt64();
break;
}
case 34: {
// bytes s = 4
s_ = input.readBytes();
break;
}
case 42: {
// .onnx.TensorProto t = 5; merge with any previously-read value
// (proto semantics for a repeated occurrence of a singular message).
onnx.OnnxMl.TensorProto.Builder subBuilder = null;
if (t_ != null) {
subBuilder = t_.toBuilder();
}
t_ = input.readMessage(onnx.OnnxMl.TensorProto.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(t_);
t_ = subBuilder.buildPartial();
}
break;
}
case 50: {
// .onnx.GraphProto g = 6; same merge-on-repeat semantics as t.
onnx.OnnxMl.GraphProto.Builder subBuilder = null;
if (g_ != null) {
subBuilder = g_.toBuilder();
}
g_ = input.readMessage(onnx.OnnxMl.GraphProto.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(g_);
g_ = subBuilder.buildPartial();
}
break;
}
case 61: {
// repeated float floats = 7, unpacked (one fixed32 per tag).
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
floats_ = newFloatList();
mutable_bitField0_ |= 0x00000001;
}
floats_.addFloat(input.readFloat());
break;
}
case 58: {
// repeated float floats = 7, packed (length-delimited run of fixed32s).
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
if (!((mutable_bitField0_ & 0x00000001) != 0) && input.getBytesUntilLimit() > 0) {
floats_ = newFloatList();
mutable_bitField0_ |= 0x00000001;
}
while (input.getBytesUntilLimit() > 0) {
floats_.addFloat(input.readFloat());
}
input.popLimit(limit);
break;
}
case 64: {
// repeated int64 ints = 8, unpacked.
if (!((mutable_bitField0_ & 0x00000002) != 0)) {
ints_ = newLongList();
mutable_bitField0_ |= 0x00000002;
}
ints_.addLong(input.readInt64());
break;
}
case 66: {
// repeated int64 ints = 8, packed.
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
if (!((mutable_bitField0_ & 0x00000002) != 0) && input.getBytesUntilLimit() > 0) {
ints_ = newLongList();
mutable_bitField0_ |= 0x00000002;
}
while (input.getBytesUntilLimit() > 0) {
ints_.addLong(input.readInt64());
}
input.popLimit(limit);
break;
}
case 74: {
// repeated bytes strings = 9
if (!((mutable_bitField0_ & 0x00000004) != 0)) {
strings_ = new java.util.ArrayList();
mutable_bitField0_ |= 0x00000004;
}
strings_.add(input.readBytes());
break;
}
case 82: {
// repeated .onnx.TensorProto tensors = 10
if (!((mutable_bitField0_ & 0x00000008) != 0)) {
tensors_ = new java.util.ArrayList();
mutable_bitField0_ |= 0x00000008;
}
tensors_.add(
input.readMessage(onnx.OnnxMl.TensorProto.parser(), extensionRegistry));
break;
}
case 90: {
// repeated .onnx.GraphProto graphs = 11
if (!((mutable_bitField0_ & 0x00000010) != 0)) {
graphs_ = new java.util.ArrayList();
mutable_bitField0_ |= 0x00000010;
}
graphs_.add(
input.readMessage(onnx.OnnxMl.GraphProto.parser(), extensionRegistry));
break;
}
case 106: {
// string doc_string = 13
java.lang.String s = input.readStringRequireUtf8();
docString_ = s;
break;
}
case 114: {
// .onnx.TypeProto tp = 14; merge-on-repeat semantics.
onnx.OnnxMl.TypeProto.Builder subBuilder = null;
if (tp_ != null) {
subBuilder = tp_.toBuilder();
}
tp_ = input.readMessage(onnx.OnnxMl.TypeProto.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(tp_);
tp_ = subBuilder.buildPartial();
}
break;
}
case 122: {
// repeated .onnx.TypeProto type_protos = 15
if (!((mutable_bitField0_ & 0x00000040) != 0)) {
typeProtos_ = new java.util.ArrayList();
mutable_bitField0_ |= 0x00000040;
}
typeProtos_.add(
input.readMessage(onnx.OnnxMl.TypeProto.parser(), extensionRegistry));
break;
}
case 160: {
// .onnx.AttributeProto.AttributeType type = 20; stored as the raw
// wire number so unknown enum values round-trip.
int rawValue = input.readEnum();
type_ = rawValue;
break;
}
case 170: {
// string ref_attr_name = 21
java.lang.String s = input.readStringRequireUtf8();
refAttrName_ = s;
break;
}
case 178: {
// .onnx.SparseTensorProto sparse_tensor = 22; merge-on-repeat semantics.
onnx.OnnxMl.SparseTensorProto.Builder subBuilder = null;
if (sparseTensor_ != null) {
subBuilder = sparseTensor_.toBuilder();
}
sparseTensor_ = input.readMessage(onnx.OnnxMl.SparseTensorProto.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(sparseTensor_);
sparseTensor_ = subBuilder.buildPartial();
}
break;
}
case 186: {
// repeated .onnx.SparseTensorProto sparse_tensors = 23
if (!((mutable_bitField0_ & 0x00000020) != 0)) {
sparseTensors_ = new java.util.ArrayList();
mutable_bitField0_ |= 0x00000020;
}
sparseTensors_.add(
input.readMessage(onnx.OnnxMl.SparseTensorProto.parser(), extensionRegistry));
break;
}
default: {
// Unknown field: preserve it; parseUnknownField returns false at
// end-group tags, which also terminates the loop.
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (org.nd4j.shade.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.nd4j.shade.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Seal every repeated field that was touched so the immutable message
// never exposes a mutable list, even if parsing failed part-way.
if (((mutable_bitField0_ & 0x00000001) != 0)) {
floats_.makeImmutable();
}
if (((mutable_bitField0_ & 0x00000002) != 0)) {
ints_.makeImmutable();
}
if (((mutable_bitField0_ & 0x00000004) != 0)) {
strings_ = java.util.Collections.unmodifiableList(strings_);
}
if (((mutable_bitField0_ & 0x00000008) != 0)) {
tensors_ = java.util.Collections.unmodifiableList(tensors_);
}
if (((mutable_bitField0_ & 0x00000010) != 0)) {
graphs_ = java.util.Collections.unmodifiableList(graphs_);
}
if (((mutable_bitField0_ & 0x00000040) != 0)) {
typeProtos_ = java.util.Collections.unmodifiableList(typeProtos_);
}
if (((mutable_bitField0_ & 0x00000020) != 0)) {
sparseTensors_ = java.util.Collections.unmodifiableList(sparseTensors_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Reflection support: the message descriptor and the accessor table are
// initialized once at file-descriptor build time in the enclosing OnnxMl class.
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return onnx.OnnxMl.internal_static_onnx_AttributeProto_descriptor;
}
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return onnx.OnnxMl.internal_static_onnx_AttributeProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
onnx.OnnxMl.AttributeProto.class, onnx.OnnxMl.AttributeProto.Builder.class);
}
/**
 *
 * Note: this enum is structurally identical to the OpSchema::AttrType
 * enum defined in schema.h. If you rev one, you likely need to rev the other.
 *
 * Wire numbers 0-5, 11 and 13 are the singular value kinds; 6-10, 12 and 14
 * are their repeated counterparts.
 *
 * Protobuf enum {@code onnx.AttributeProto.AttributeType}
 */
public enum AttributeType
implements org.nd4j.shade.protobuf.ProtocolMessageEnum {
/**
 * UNDEFINED = 0;
 */
UNDEFINED(0),
/**
 * FLOAT = 1;
 */
FLOAT(1),
/**
 * INT = 2;
 */
INT(2),
/**
 * STRING = 3;
 */
STRING(3),
/**
 * TENSOR = 4;
 */
TENSOR(4),
/**
 * GRAPH = 5;
 */
GRAPH(5),
/**
 * SPARSE_TENSOR = 11;
 */
SPARSE_TENSOR(11),
/**
 * TYPE_PROTO = 13;
 */
TYPE_PROTO(13),
/**
 * FLOATS = 6;
 */
FLOATS(6),
/**
 * INTS = 7;
 */
INTS(7),
/**
 * STRINGS = 8;
 */
STRINGS(8),
/**
 * TENSORS = 9;
 */
TENSORS(9),
/**
 * GRAPHS = 10;
 */
GRAPHS(10),
/**
 * SPARSE_TENSORS = 12;
 */
SPARSE_TENSORS(12),
/**
 * TYPE_PROTOS = 14;
 */
TYPE_PROTOS(14),
// Sentinel for wire values that have no matching constant (proto3 open
// enums); getNumber() and getValueDescriptor() throw for this constant.
UNRECOGNIZED(-1),
;
/**
 * UNDEFINED = 0;
 */
public static final int UNDEFINED_VALUE = 0;
/**
 * FLOAT = 1;
 */
public static final int FLOAT_VALUE = 1;
/**
 * INT = 2;
 */
public static final int INT_VALUE = 2;
/**
 * STRING = 3;
 */
public static final int STRING_VALUE = 3;
/**
 * TENSOR = 4;
 */
public static final int TENSOR_VALUE = 4;
/**
 * GRAPH = 5;
 */
public static final int GRAPH_VALUE = 5;
/**
 * SPARSE_TENSOR = 11;
 */
public static final int SPARSE_TENSOR_VALUE = 11;
/**
 * TYPE_PROTO = 13;
 */
public static final int TYPE_PROTO_VALUE = 13;
/**
 * FLOATS = 6;
 */
public static final int FLOATS_VALUE = 6;
/**
 * INTS = 7;
 */
public static final int INTS_VALUE = 7;
/**
 * STRINGS = 8;
 */
public static final int STRINGS_VALUE = 8;
/**
 * TENSORS = 9;
 */
public static final int TENSORS_VALUE = 9;
/**
 * GRAPHS = 10;
 */
public static final int GRAPHS_VALUE = 10;
/**
 * SPARSE_TENSORS = 12;
 */
public static final int SPARSE_TENSORS_VALUE = 12;
/**
 * TYPE_PROTOS = 14;
 */
public static final int TYPE_PROTOS_VALUE = 14;
/**
 * Returns the numeric wire value of this enum constant.
 * @throws java.lang.IllegalArgumentException if this is {@code UNRECOGNIZED},
 *         which has no defined wire value.
 */
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value.
 * @deprecated Use {@link #forNumber(int)} instead.
 */
@java.lang.Deprecated
public static AttributeType valueOf(int value) {
return forNumber(value);
}
/**
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value, or
 *         {@code null} if no constant matches.
 */
public static AttributeType forNumber(int value) {
switch (value) {
case 0: return UNDEFINED;
case 1: return FLOAT;
case 2: return INT;
case 3: return STRING;
case 4: return TENSOR;
case 5: return GRAPH;
case 11: return SPARSE_TENSOR;
case 13: return TYPE_PROTO;
case 6: return FLOATS;
case 7: return INTS;
case 8: return STRINGS;
case 9: return TENSORS;
case 10: return GRAPHS;
case 12: return SPARSE_TENSORS;
case 14: return TYPE_PROTOS;
default: return null;
}
}
// Wire-number-to-constant mapping used by the protobuf runtime.
public static org.nd4j.shade.protobuf.Internal.EnumLiteMap<AttributeType>
internalGetValueMap() {
return internalValueMap;
}
private static final org.nd4j.shade.protobuf.Internal.EnumLiteMap<
AttributeType> internalValueMap =
new org.nd4j.shade.protobuf.Internal.EnumLiteMap<AttributeType>() {
public AttributeType findValueByNumber(int number) {
return AttributeType.forNumber(number);
}
};
// Descriptor-based reflection support (requires the full, non-lite runtime).
// Value descriptors are stored in declaration order, so ordinal() is a safe index.
public final org.nd4j.shade.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final org.nd4j.shade.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final org.nd4j.shade.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
// AttributeType is the first enum nested inside onnx.AttributeProto.
return onnx.OnnxMl.AttributeProto.getDescriptor().getEnumTypes().get(0);
}
private static final AttributeType[] VALUES = values();
/** Looks up the constant for a value descriptor obtained via reflection. */
public static AttributeType valueOf(
org.nd4j.shade.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
// The proto wire number of this constant (-1 for UNRECOGNIZED).
private final int value;
private AttributeType(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:onnx.AttributeProto.AttributeType)
}
public static final int NAME_FIELD_NUMBER = 1;
// Holds either a String or a ByteString: parsed messages start with the
// ByteString form and the accessors lazily convert and cache the other
// representation. volatile makes the cached swap safe across threads.
private volatile java.lang.Object name_;
/**
 *
 * The name field MUST be present for this version of the IR.
 *
 *
 * string name = 1;
 * @return The name.
 */
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls skip the UTF-8 decode.
name_ = s;
return s;
}
}
/**
 *
 * The name field MUST be present for this version of the IR.
 *
 *
 * string name = 1;
 * @return The bytes for name.
 */
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded ByteString for subsequent byte-level access.
name_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
public static final int REF_ATTR_NAME_FIELD_NUMBER = 21;
// String/ByteString dual representation, lazily converted (see name_).
private volatile java.lang.Object refAttrName_;
/**
*
* if ref_attr_name is not empty, ref_attr_name is the attribute name in parent function.
* In this case, this AttributeProto does not contain data, and it's a reference of attribute
* in parent scope.
* NOTE: This should ONLY be used in function (sub-graph). It's invalid to be used in main graph.
*
*
* string ref_attr_name = 21;
* @return The refAttrName.
*/
@java.lang.Override
public java.lang.String getRefAttrName() {
java.lang.Object ref = refAttrName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
refAttrName_ = s;
return s;
}
}
/**
*
* if ref_attr_name is not empty, ref_attr_name is the attribute name in parent function.
* In this case, this AttributeProto does not contain data, and it's a reference of attribute
* in parent scope.
* NOTE: This should ONLY be used in function (sub-graph). It's invalid to be used in main graph.
*
*
* string ref_attr_name = 21;
* @return The bytes for refAttrName.
*/
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString
getRefAttrNameBytes() {
java.lang.Object ref = refAttrName_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
refAttrName_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
public static final int DOC_STRING_FIELD_NUMBER = 13;
// String/ByteString dual representation, lazily converted (see name_).
private volatile java.lang.Object docString_;
/**
*
* A human-readable documentation for this attribute. Markdown is allowed.
*
*
* string doc_string = 13;
* @return The docString.
*/
@java.lang.Override
public java.lang.String getDocString() {
java.lang.Object ref = docString_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
docString_ = s;
return s;
}
}
/**
*
* A human-readable documentation for this attribute. Markdown is allowed.
*
*
* string doc_string = 13;
* @return The bytes for docString.
*/
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString
getDocStringBytes() {
java.lang.Object ref = docString_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
docString_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
public static final int TYPE_FIELD_NUMBER = 20;
// Stored as the raw wire number so unknown enum values survive round-trips.
private int type_;
/**
*
* The type field MUST be present for this version of the IR.
* For 0.0.1 versions of the IR, this field was not defined, and
* implementations needed to use has_field heuristics to determine
* which value field was in use. For IR_VERSION 0.0.2 or later, this
* field MUST be set and match the f|i|s|t|... field in use. This
* change was made to accommodate proto3 implementations.
*
*
* .onnx.AttributeProto.AttributeType type = 20;
* @return The enum numeric value on the wire for type.
*/
@java.lang.Override public int getTypeValue() {
return type_;
}
/**
*
* The type field MUST be present for this version of the IR.
* For 0.0.1 versions of the IR, this field was not defined, and
* implementations needed to use has_field heuristics to determine
* which value field was in use. For IR_VERSION 0.0.2 or later, this
* field MUST be set and match the f|i|s|t|... field in use. This
* change was made to accommodate proto3 implementations.
*
*
* .onnx.AttributeProto.AttributeType type = 20;
* @return The type.
*/
@java.lang.Override public onnx.OnnxMl.AttributeProto.AttributeType getType() {
@SuppressWarnings("deprecation")
onnx.OnnxMl.AttributeProto.AttributeType result = onnx.OnnxMl.AttributeProto.AttributeType.valueOf(type_);
// Wire numbers with no matching constant map to UNRECOGNIZED, never null.
return result == null ? onnx.OnnxMl.AttributeProto.AttributeType.UNRECOGNIZED : result;
}
// Scalar value fields: per the AttributeProto contract, exactly one of the
// singular/repeated content fields below should be populated.
public static final int F_FIELD_NUMBER = 2;
private float f_;
/**
*
* Exactly ONE of the following fields must be present for this version of the IR
*
*
* float f = 2;
* @return The f.
*/
@java.lang.Override
public float getF() {
return f_;
}
public static final int I_FIELD_NUMBER = 3;
private long i_;
/**
*
* int
*
*
* int64 i = 3;
* @return The i.
*/
@java.lang.Override
public long getI() {
return i_;
}
public static final int S_FIELD_NUMBER = 4;
private org.nd4j.shade.protobuf.ByteString s_;
/**
*
* UTF-8 string
*
*
* bytes s = 4;
* @return The s.
*/
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString getS() {
return s_;
}
public static final int T_FIELD_NUMBER = 5;
// Singular message field: null means "not set" (proto3 message presence).
private onnx.OnnxMl.TensorProto t_;
/**
*
* tensor value
*
*
* .onnx.TensorProto t = 5;
* @return Whether the t field is set.
*/
@java.lang.Override
public boolean hasT() {
return t_ != null;
}
/**
*
* tensor value
*
*
* .onnx.TensorProto t = 5;
* @return The t.
*/
@java.lang.Override
public onnx.OnnxMl.TensorProto getT() {
// Never returns null; unset fields yield the immutable default instance.
return t_ == null ? onnx.OnnxMl.TensorProto.getDefaultInstance() : t_;
}
/**
*
* tensor value
*
*
* .onnx.TensorProto t = 5;
*/
@java.lang.Override
public onnx.OnnxMl.TensorProtoOrBuilder getTOrBuilder() {
return getT();
}
public static final int G_FIELD_NUMBER = 6;
// Singular message field: null means "not set".
private onnx.OnnxMl.GraphProto g_;
/**
*
* graph
*
*
* .onnx.GraphProto g = 6;
* @return Whether the g field is set.
*/
@java.lang.Override
public boolean hasG() {
return g_ != null;
}
/**
*
* graph
*
*
* .onnx.GraphProto g = 6;
* @return The g.
*/
@java.lang.Override
public onnx.OnnxMl.GraphProto getG() {
return g_ == null ? onnx.OnnxMl.GraphProto.getDefaultInstance() : g_;
}
/**
*
* graph
*
*
* .onnx.GraphProto g = 6;
*/
@java.lang.Override
public onnx.OnnxMl.GraphProtoOrBuilder getGOrBuilder() {
return getG();
}
public static final int SPARSE_TENSOR_FIELD_NUMBER = 22;
// Singular message field: null means "not set".
private onnx.OnnxMl.SparseTensorProto sparseTensor_;
/**
*
* sparse tensor value
*
*
* .onnx.SparseTensorProto sparse_tensor = 22;
* @return Whether the sparseTensor field is set.
*/
@java.lang.Override
public boolean hasSparseTensor() {
return sparseTensor_ != null;
}
/**
*
* sparse tensor value
*
*
* .onnx.SparseTensorProto sparse_tensor = 22;
* @return The sparseTensor.
*/
@java.lang.Override
public onnx.OnnxMl.SparseTensorProto getSparseTensor() {
return sparseTensor_ == null ? onnx.OnnxMl.SparseTensorProto.getDefaultInstance() : sparseTensor_;
}
/**
*
* sparse tensor value
*
*
* .onnx.SparseTensorProto sparse_tensor = 22;
*/
@java.lang.Override
public onnx.OnnxMl.SparseTensorProtoOrBuilder getSparseTensorOrBuilder() {
return getSparseTensor();
}
public static final int TP_FIELD_NUMBER = 14;
// Singular message field: null means "not set".
private onnx.OnnxMl.TypeProto tp_;
/**
*
* Do not use field below, it's deprecated.
* optional ValueProto v = 12; // value - subsumes everything but graph
*
*
* .onnx.TypeProto tp = 14;
* @return Whether the tp field is set.
*/
@java.lang.Override
public boolean hasTp() {
return tp_ != null;
}
/**
*
* Do not use field below, it's deprecated.
* optional ValueProto v = 12; // value - subsumes everything but graph
*
*
* .onnx.TypeProto tp = 14;
* @return The tp.
*/
@java.lang.Override
public onnx.OnnxMl.TypeProto getTp() {
return tp_ == null ? onnx.OnnxMl.TypeProto.getDefaultInstance() : tp_;
}
/**
*
* Do not use field below, it's deprecated.
* optional ValueProto v = 12; // value - subsumes everything but graph
*
*
* .onnx.TypeProto tp = 14;
*/
@java.lang.Override
public onnx.OnnxMl.TypeProtoOrBuilder getTpOrBuilder() {
return getTp();
}
public static final int FLOATS_FIELD_NUMBER = 7;
// Primitive-specialized list to avoid boxing each float.
private org.nd4j.shade.protobuf.Internal.FloatList floats_;
/**
*
* list of floats
*
*
* repeated float floats = 7;
* @return A list containing the floats.
*/
@java.lang.Override
public java.util.List
getFloatsList() {
return floats_;
}
/**
*
* list of floats
*
*
* repeated float floats = 7;
* @return The count of floats.
*/
public int getFloatsCount() {
return floats_.size();
}
/**
*
* list of floats
*
*
* repeated float floats = 7;
* @param index The index of the element to return.
* @return The floats at the given index.
*/
public float getFloats(int index) {
return floats_.getFloat(index);
}
// Packed byte size of floats, computed by getSerializedSize() and reused
// by writeTo() when emitting the length-delimited packed field.
private int floatsMemoizedSerializedSize = -1;
public static final int INTS_FIELD_NUMBER = 8;
// Primitive-specialized list to avoid boxing each long.
private org.nd4j.shade.protobuf.Internal.LongList ints_;
/**
*
* list of ints
*
*
* repeated int64 ints = 8;
* @return A list containing the ints.
*/
@java.lang.Override
public java.util.List
getIntsList() {
return ints_;
}
/**
*
* list of ints
*
*
* repeated int64 ints = 8;
* @return The count of ints.
*/
public int getIntsCount() {
return ints_.size();
}
/**
*
* list of ints
*
*
* repeated int64 ints = 8;
* @param index The index of the element to return.
* @return The ints at the given index.
*/
public long getInts(int index) {
return ints_.getLong(index);
}
// Packed byte size of ints, memoized for writeTo() (see getSerializedSize()).
private int intsMemoizedSerializedSize = -1;
public static final int STRINGS_FIELD_NUMBER = 9;
// Element type is ByteString (raw generated code; generics lost in this dump).
private java.util.List strings_;
/**
*
* list of UTF-8 strings
*
*
* repeated bytes strings = 9;
* @return A list containing the strings.
*/
@java.lang.Override
public java.util.List
getStringsList() {
return strings_;
}
/**
*
* list of UTF-8 strings
*
*
* repeated bytes strings = 9;
* @return The count of strings.
*/
public int getStringsCount() {
return strings_.size();
}
/**
*
* list of UTF-8 strings
*
*
* repeated bytes strings = 9;
* @param index The index of the element to return.
* @return The strings at the given index.
*/
public org.nd4j.shade.protobuf.ByteString getStrings(int index) {
return strings_.get(index);
}
public static final int TENSORS_FIELD_NUMBER = 10;
// Element type is onnx.OnnxMl.TensorProto.
private java.util.List tensors_;
/**
*
* list of tensors
*
*
* repeated .onnx.TensorProto tensors = 10;
*/
@java.lang.Override
public java.util.List getTensorsList() {
return tensors_;
}
/**
*
* list of tensors
*
*
* repeated .onnx.TensorProto tensors = 10;
*/
@java.lang.Override
public java.util.List
getTensorsOrBuilderList() {
return tensors_;
}
/**
*
* list of tensors
*
*
* repeated .onnx.TensorProto tensors = 10;
*/
@java.lang.Override
public int getTensorsCount() {
return tensors_.size();
}
/**
*
* list of tensors
*
*
* repeated .onnx.TensorProto tensors = 10;
*/
@java.lang.Override
public onnx.OnnxMl.TensorProto getTensors(int index) {
return tensors_.get(index);
}
/**
*
* list of tensors
*
*
* repeated .onnx.TensorProto tensors = 10;
*/
@java.lang.Override
public onnx.OnnxMl.TensorProtoOrBuilder getTensorsOrBuilder(
int index) {
return tensors_.get(index);
}
public static final int GRAPHS_FIELD_NUMBER = 11;
// Element type is onnx.OnnxMl.GraphProto.
private java.util.List graphs_;
/**
*
* list of graph
*
*
* repeated .onnx.GraphProto graphs = 11;
*/
@java.lang.Override
public java.util.List getGraphsList() {
return graphs_;
}
/**
*
* list of graph
*
*
* repeated .onnx.GraphProto graphs = 11;
*/
@java.lang.Override
public java.util.List
getGraphsOrBuilderList() {
return graphs_;
}
/**
*
* list of graph
*
*
* repeated .onnx.GraphProto graphs = 11;
*/
@java.lang.Override
public int getGraphsCount() {
return graphs_.size();
}
/**
*
* list of graph
*
*
* repeated .onnx.GraphProto graphs = 11;
*/
@java.lang.Override
public onnx.OnnxMl.GraphProto getGraphs(int index) {
return graphs_.get(index);
}
/**
*
* list of graph
*
*
* repeated .onnx.GraphProto graphs = 11;
*/
@java.lang.Override
public onnx.OnnxMl.GraphProtoOrBuilder getGraphsOrBuilder(
int index) {
return graphs_.get(index);
}
public static final int SPARSE_TENSORS_FIELD_NUMBER = 23;
// Element type is onnx.OnnxMl.SparseTensorProto.
private java.util.List sparseTensors_;
/**
*
* list of sparse tensors
*
*
* repeated .onnx.SparseTensorProto sparse_tensors = 23;
*/
@java.lang.Override
public java.util.List getSparseTensorsList() {
return sparseTensors_;
}
/**
*
* list of sparse tensors
*
*
* repeated .onnx.SparseTensorProto sparse_tensors = 23;
*/
@java.lang.Override
public java.util.List
getSparseTensorsOrBuilderList() {
return sparseTensors_;
}
/**
*
* list of sparse tensors
*
*
* repeated .onnx.SparseTensorProto sparse_tensors = 23;
*/
@java.lang.Override
public int getSparseTensorsCount() {
return sparseTensors_.size();
}
/**
*
* list of sparse tensors
*
*
* repeated .onnx.SparseTensorProto sparse_tensors = 23;
*/
@java.lang.Override
public onnx.OnnxMl.SparseTensorProto getSparseTensors(int index) {
return sparseTensors_.get(index);
}
/**
*
* list of sparse tensors
*
*
* repeated .onnx.SparseTensorProto sparse_tensors = 23;
*/
@java.lang.Override
public onnx.OnnxMl.SparseTensorProtoOrBuilder getSparseTensorsOrBuilder(
int index) {
return sparseTensors_.get(index);
}
public static final int TYPE_PROTOS_FIELD_NUMBER = 15;
// Element type is onnx.OnnxMl.TypeProto.
private java.util.List typeProtos_;
/**
*
* list of type protos
*
*
* repeated .onnx.TypeProto type_protos = 15;
*/
@java.lang.Override
public java.util.List getTypeProtosList() {
return typeProtos_;
}
/**
*
* list of type protos
*
*
* repeated .onnx.TypeProto type_protos = 15;
*/
@java.lang.Override
public java.util.List
getTypeProtosOrBuilderList() {
return typeProtos_;
}
/**
*
* list of type protos
*
*
* repeated .onnx.TypeProto type_protos = 15;
*/
@java.lang.Override
public int getTypeProtosCount() {
return typeProtos_.size();
}
/**
*
* list of type protos
*
*
* repeated .onnx.TypeProto type_protos = 15;
*/
@java.lang.Override
public onnx.OnnxMl.TypeProto getTypeProtos(int index) {
return typeProtos_.get(index);
}
/**
*
* list of type protos
*
*
* repeated .onnx.TypeProto type_protos = 15;
*/
@java.lang.Override
public onnx.OnnxMl.TypeProtoOrBuilder getTypeProtosOrBuilder(
int index) {
return typeProtos_.get(index);
}
// -1 = not yet computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
// Proto3 message with no required fields: always initialized; result cached.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes all set fields to the wire. Scalar fields are skipped when they
// hold their proto3 default; message fields when null. The packed repeated
// floats/ints reuse byte sizes memoized by getSerializedSize().
@java.lang.Override
public void writeTo(org.nd4j.shade.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Populates floatsMemoizedSerializedSize / intsMemoizedSerializedSize.
getSerializedSize();
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
// Raw-bit comparison so that -0.0f (nonzero bits) is still serialized.
if (java.lang.Float.floatToRawIntBits(f_) != 0) {
output.writeFloat(2, f_);
}
if (i_ != 0L) {
output.writeInt64(3, i_);
}
if (!s_.isEmpty()) {
output.writeBytes(4, s_);
}
if (t_ != null) {
output.writeMessage(5, getT());
}
if (g_ != null) {
output.writeMessage(6, getG());
}
if (getFloatsList().size() > 0) {
// Packed encoding: tag 58 == (field 7 << 3) | wiretype 2, then byte length.
output.writeUInt32NoTag(58);
output.writeUInt32NoTag(floatsMemoizedSerializedSize);
}
for (int i = 0; i < floats_.size(); i++) {
output.writeFloatNoTag(floats_.getFloat(i));
}
if (getIntsList().size() > 0) {
// Packed encoding: tag 66 == (field 8 << 3) | wiretype 2, then byte length.
output.writeUInt32NoTag(66);
output.writeUInt32NoTag(intsMemoizedSerializedSize);
}
for (int i = 0; i < ints_.size(); i++) {
output.writeInt64NoTag(ints_.getLong(i));
}
for (int i = 0; i < strings_.size(); i++) {
output.writeBytes(9, strings_.get(i));
}
for (int i = 0; i < tensors_.size(); i++) {
output.writeMessage(10, tensors_.get(i));
}
for (int i = 0; i < graphs_.size(); i++) {
output.writeMessage(11, graphs_.get(i));
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(docString_)) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 13, docString_);
}
if (tp_ != null) {
output.writeMessage(14, getTp());
}
for (int i = 0; i < typeProtos_.size(); i++) {
output.writeMessage(15, typeProtos_.get(i));
}
// Enum skipped when it equals the default (UNDEFINED = 0).
if (type_ != onnx.OnnxMl.AttributeProto.AttributeType.UNDEFINED.getNumber()) {
output.writeEnum(20, type_);
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(refAttrName_)) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 21, refAttrName_);
}
if (sparseTensor_ != null) {
output.writeMessage(22, getSparseTensor());
}
for (int i = 0; i < sparseTensors_.size(); i++) {
output.writeMessage(23, sparseTensors_.get(i));
}
// Preserve any fields read from a newer schema version.
unknownFields.writeTo(output);
}
// Computes (and memoizes in memoizedSize) the exact number of bytes writeTo()
// will emit, mirroring its field-presence checks. Also memoizes the packed
// payload sizes for the floats/ints fields as a side effect.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (java.lang.Float.floatToRawIntBits(f_) != 0) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeFloatSize(2, f_);
}
if (i_ != 0L) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeInt64Size(3, i_);
}
if (!s_.isEmpty()) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeBytesSize(4, s_);
}
if (t_ != null) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(5, getT());
}
if (g_ != null) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(6, getG());
}
{
// Packed floats: fixed 4 bytes per element, plus 1 tag byte and a varint
// length prefix when non-empty.
int dataSize = 0;
dataSize = 4 * getFloatsList().size();
size += dataSize;
if (!getFloatsList().isEmpty()) {
size += 1;
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeInt32SizeNoTag(dataSize);
}
floatsMemoizedSerializedSize = dataSize;
}
{
// Packed int64s: varint-encoded, so each element's size must be computed.
int dataSize = 0;
for (int i = 0; i < ints_.size(); i++) {
dataSize += org.nd4j.shade.protobuf.CodedOutputStream
.computeInt64SizeNoTag(ints_.getLong(i));
}
size += dataSize;
if (!getIntsList().isEmpty()) {
size += 1;
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeInt32SizeNoTag(dataSize);
}
intsMemoizedSerializedSize = dataSize;
}
{
// Unpacked bytes field: one 1-byte tag per element plus each payload.
int dataSize = 0;
for (int i = 0; i < strings_.size(); i++) {
dataSize += org.nd4j.shade.protobuf.CodedOutputStream
.computeBytesSizeNoTag(strings_.get(i));
}
size += dataSize;
size += 1 * getStringsList().size();
}
for (int i = 0; i < tensors_.size(); i++) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(10, tensors_.get(i));
}
for (int i = 0; i < graphs_.size(); i++) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(11, graphs_.get(i));
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(docString_)) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(13, docString_);
}
if (tp_ != null) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(14, getTp());
}
for (int i = 0; i < typeProtos_.size(); i++) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(15, typeProtos_.get(i));
}
if (type_ != onnx.OnnxMl.AttributeProto.AttributeType.UNDEFINED.getNumber()) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeEnumSize(20, type_);
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(refAttrName_)) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(21, refAttrName_);
}
if (sparseTensor_ != null) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(22, getSparseTensor());
}
for (int i = 0; i < sparseTensors_.size(); i++) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(23, sparseTensors_.get(i));
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Field-by-field structural equality over every declared field plus the
// unknown-field set; singular messages are compared only when both sides
// have them set.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof onnx.OnnxMl.AttributeProto)) {
return super.equals(obj);
}
onnx.OnnxMl.AttributeProto other = (onnx.OnnxMl.AttributeProto) obj;
if (!getName()
.equals(other.getName())) return false;
if (!getRefAttrName()
.equals(other.getRefAttrName())) return false;
if (!getDocString()
.equals(other.getDocString())) return false;
if (type_ != other.type_) return false;
// Bit-level comparison so NaN == NaN and +0.0 != -0.0, unlike the == operator.
if (java.lang.Float.floatToIntBits(getF())
!= java.lang.Float.floatToIntBits(
other.getF())) return false;
if (getI()
!= other.getI()) return false;
if (!getS()
.equals(other.getS())) return false;
if (hasT() != other.hasT()) return false;
if (hasT()) {
if (!getT()
.equals(other.getT())) return false;
}
if (hasG() != other.hasG()) return false;
if (hasG()) {
if (!getG()
.equals(other.getG())) return false;
}
if (hasSparseTensor() != other.hasSparseTensor()) return false;
if (hasSparseTensor()) {
if (!getSparseTensor()
.equals(other.getSparseTensor())) return false;
}
if (hasTp() != other.hasTp()) return false;
if (hasTp()) {
if (!getTp()
.equals(other.getTp())) return false;
}
if (!getFloatsList()
.equals(other.getFloatsList())) return false;
if (!getIntsList()
.equals(other.getIntsList())) return false;
if (!getStringsList()
.equals(other.getStringsList())) return false;
if (!getTensorsList()
.equals(other.getTensorsList())) return false;
if (!getGraphsList()
.equals(other.getGraphsList())) return false;
if (!getSparseTensorsList()
.equals(other.getSparseTensorsList())) return false;
if (!getTypeProtosList()
.equals(other.getTypeProtosList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Hash over the same fields equals() compares (mixing each field number with
// its value), memoized since the message is immutable. Optional/repeated
// fields only contribute when set/non-empty, matching equals().
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (37 * hash) + REF_ATTR_NAME_FIELD_NUMBER;
hash = (53 * hash) + getRefAttrName().hashCode();
hash = (37 * hash) + DOC_STRING_FIELD_NUMBER;
hash = (53 * hash) + getDocString().hashCode();
hash = (37 * hash) + TYPE_FIELD_NUMBER;
hash = (53 * hash) + type_;
hash = (37 * hash) + F_FIELD_NUMBER;
// floatToIntBits keeps the hash consistent with the bit-level equals().
hash = (53 * hash) + java.lang.Float.floatToIntBits(
getF());
hash = (37 * hash) + I_FIELD_NUMBER;
hash = (53 * hash) + org.nd4j.shade.protobuf.Internal.hashLong(
getI());
hash = (37 * hash) + S_FIELD_NUMBER;
hash = (53 * hash) + getS().hashCode();
if (hasT()) {
hash = (37 * hash) + T_FIELD_NUMBER;
hash = (53 * hash) + getT().hashCode();
}
if (hasG()) {
hash = (37 * hash) + G_FIELD_NUMBER;
hash = (53 * hash) + getG().hashCode();
}
if (hasSparseTensor()) {
hash = (37 * hash) + SPARSE_TENSOR_FIELD_NUMBER;
hash = (53 * hash) + getSparseTensor().hashCode();
}
if (hasTp()) {
hash = (37 * hash) + TP_FIELD_NUMBER;
hash = (53 * hash) + getTp().hashCode();
}
if (getFloatsCount() > 0) {
hash = (37 * hash) + FLOATS_FIELD_NUMBER;
hash = (53 * hash) + getFloatsList().hashCode();
}
if (getIntsCount() > 0) {
hash = (37 * hash) + INTS_FIELD_NUMBER;
hash = (53 * hash) + getIntsList().hashCode();
}
if (getStringsCount() > 0) {
hash = (37 * hash) + STRINGS_FIELD_NUMBER;
hash = (53 * hash) + getStringsList().hashCode();
}
if (getTensorsCount() > 0) {
hash = (37 * hash) + TENSORS_FIELD_NUMBER;
hash = (53 * hash) + getTensorsList().hashCode();
}
if (getGraphsCount() > 0) {
hash = (37 * hash) + GRAPHS_FIELD_NUMBER;
hash = (53 * hash) + getGraphsList().hashCode();
}
if (getSparseTensorsCount() > 0) {
hash = (37 * hash) + SPARSE_TENSORS_FIELD_NUMBER;
hash = (53 * hash) + getSparseTensorsList().hashCode();
}
if (getTypeProtosCount() > 0) {
hash = (37 * hash) + TYPE_PROTOS_FIELD_NUMBER;
hash = (53 * hash) + getTypeProtosList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points. The byte-array/ByteString/ByteBuffer
// overloads throw InvalidProtocolBufferException on malformed input; the
// stream overloads wrap parsing in IOException-propagating helpers.
// parseDelimitedFrom expects a varint length prefix before the message.
public static onnx.OnnxMl.AttributeProto parseFrom(
java.nio.ByteBuffer data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.AttributeProto parseFrom(
java.nio.ByteBuffer data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.AttributeProto parseFrom(
org.nd4j.shade.protobuf.ByteString data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.AttributeProto parseFrom(
org.nd4j.shade.protobuf.ByteString data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.AttributeProto parseFrom(byte[] data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.AttributeProto parseFrom(
byte[] data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.AttributeProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static onnx.OnnxMl.AttributeProto parseFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static onnx.OnnxMl.AttributeProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static onnx.OnnxMl.AttributeProto parseDelimitedFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static onnx.OnnxMl.AttributeProto parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static onnx.OnnxMl.AttributeProto parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods. All builders start from DEFAULT_INSTANCE; the
// prototype overload pre-populates the builder with an existing message.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(onnx.OnnxMl.AttributeProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// Avoid a useless mergeFrom when this is already the default instance.
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
* Attributes
* A named attribute containing either singular float, integer, string, graph,
* and tensor values, or repeated float, integer, string, graph, and tensor values.
* An AttributeProto MUST contain the name field, and *only one* of the
* following content fields, effectively enforcing a C/C++ union equivalent.
*
*
* Protobuf type {@code onnx.AttributeProto}
*/
public static final class Builder extends
org.nd4j.shade.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:onnx.AttributeProto)
onnx.OnnxMl.AttributeProtoOrBuilder {
// Descriptor and construction plumbing for the AttributeProto builder.
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return onnx.OnnxMl.internal_static_onnx_AttributeProto_descriptor;
}
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return onnx.OnnxMl.internal_static_onnx_AttributeProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
onnx.OnnxMl.AttributeProto.class, onnx.OnnxMl.AttributeProto.Builder.class);
}
// Construct using onnx.OnnxMl.AttributeProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the repeated-message field builders when the runtime
// requires nested builders (alwaysUseFieldBuilders); otherwise they are lazy.
private void maybeForceBuilderInitialization() {
if (org.nd4j.shade.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getTensorsFieldBuilder();
getGraphsFieldBuilder();
getSparseTensorsFieldBuilder();
getTypeProtosFieldBuilder();
}
}
// Resets every field to its proto3 default and clears the per-field
// "is mutable" bits in bitField0_ that guard the repeated collections.
@java.lang.Override
public Builder clear() {
super.clear();
name_ = "";
refAttrName_ = "";
docString_ = "";
type_ = 0;
f_ = 0F;
i_ = 0L;
s_ = org.nd4j.shade.protobuf.ByteString.EMPTY;
// Singular message fields: drop both the value and any nested builder.
if (tBuilder_ == null) {
t_ = null;
} else {
t_ = null;
tBuilder_ = null;
}
if (gBuilder_ == null) {
g_ = null;
} else {
g_ = null;
gBuilder_ = null;
}
if (sparseTensorBuilder_ == null) {
sparseTensor_ = null;
} else {
sparseTensor_ = null;
sparseTensorBuilder_ = null;
}
if (tpBuilder_ == null) {
tp_ = null;
} else {
tp_ = null;
tpBuilder_ = null;
}
floats_ = emptyFloatList();
bitField0_ = (bitField0_ & ~0x00000001);
ints_ = emptyLongList();
bitField0_ = (bitField0_ & ~0x00000002);
strings_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
// Repeated message fields: clear either the raw list or its field builder.
if (tensorsBuilder_ == null) {
tensors_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000008);
} else {
tensorsBuilder_.clear();
}
if (graphsBuilder_ == null) {
graphs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000010);
} else {
graphsBuilder_.clear();
}
if (sparseTensorsBuilder_ == null) {
sparseTensors_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000020);
} else {
sparseTensorsBuilder_.clear();
}
if (typeProtosBuilder_ == null) {
typeProtos_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000040);
} else {
typeProtosBuilder_.clear();
}
return this;
}
@java.lang.Override
public org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return onnx.OnnxMl.internal_static_onnx_AttributeProto_descriptor;
}
@java.lang.Override
public onnx.OnnxMl.AttributeProto getDefaultInstanceForType() {
return onnx.OnnxMl.AttributeProto.getDefaultInstance();
}
// Builds and verifies the message; isInitialized() is always true for this
// proto3 type, so the exception path is effectively unreachable here.
@java.lang.Override
public onnx.OnnxMl.AttributeProto build() {
onnx.OnnxMl.AttributeProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies the builder's state into a new immutable message. Repeated
// collections are frozen in place (makeImmutable / unmodifiableList) and
// their mutable bits cleared, so the builder must re-copy before mutating
// them again; message fields come from the nested builder when one exists.
@java.lang.Override
public onnx.OnnxMl.AttributeProto buildPartial() {
onnx.OnnxMl.AttributeProto result = new onnx.OnnxMl.AttributeProto(this);
int from_bitField0_ = bitField0_;
result.name_ = name_;
result.refAttrName_ = refAttrName_;
result.docString_ = docString_;
result.type_ = type_;
result.f_ = f_;
result.i_ = i_;
result.s_ = s_;
if (tBuilder_ == null) {
result.t_ = t_;
} else {
result.t_ = tBuilder_.build();
}
if (gBuilder_ == null) {
result.g_ = g_;
} else {
result.g_ = gBuilder_.build();
}
if (sparseTensorBuilder_ == null) {
result.sparseTensor_ = sparseTensor_;
} else {
result.sparseTensor_ = sparseTensorBuilder_.build();
}
if (tpBuilder_ == null) {
result.tp_ = tp_;
} else {
result.tp_ = tpBuilder_.build();
}
if (((bitField0_ & 0x00000001) != 0)) {
floats_.makeImmutable();
bitField0_ = (bitField0_ & ~0x00000001);
}
result.floats_ = floats_;
if (((bitField0_ & 0x00000002) != 0)) {
ints_.makeImmutable();
bitField0_ = (bitField0_ & ~0x00000002);
}
result.ints_ = ints_;
if (((bitField0_ & 0x00000004) != 0)) {
strings_ = java.util.Collections.unmodifiableList(strings_);
bitField0_ = (bitField0_ & ~0x00000004);
}
result.strings_ = strings_;
if (tensorsBuilder_ == null) {
if (((bitField0_ & 0x00000008) != 0)) {
tensors_ = java.util.Collections.unmodifiableList(tensors_);
bitField0_ = (bitField0_ & ~0x00000008);
}
result.tensors_ = tensors_;
} else {
result.tensors_ = tensorsBuilder_.build();
}
if (graphsBuilder_ == null) {
if (((bitField0_ & 0x00000010) != 0)) {
graphs_ = java.util.Collections.unmodifiableList(graphs_);
bitField0_ = (bitField0_ & ~0x00000010);
}
result.graphs_ = graphs_;
} else {
result.graphs_ = graphsBuilder_.build();
}
if (sparseTensorsBuilder_ == null) {
if (((bitField0_ & 0x00000020) != 0)) {
sparseTensors_ = java.util.Collections.unmodifiableList(sparseTensors_);
bitField0_ = (bitField0_ & ~0x00000020);
}
result.sparseTensors_ = sparseTensors_;
} else {
result.sparseTensors_ = sparseTensorsBuilder_.build();
}
if (typeProtosBuilder_ == null) {
if (((bitField0_ & 0x00000040) != 0)) {
typeProtos_ = java.util.Collections.unmodifiableList(typeProtos_);
bitField0_ = (bitField0_ & ~0x00000040);
}
result.typeProtos_ = typeProtos_;
} else {
result.typeProtos_ = typeProtosBuilder_.build();
}
onBuilt();
return result;
}
// Standard generated reflection-API overrides: each delegates directly to
// the GeneratedMessageV3.Builder superclass implementation.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.nd4j.shade.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatches to the type-specific merge when |other| is an AttributeProto;
// otherwise falls back to the generic reflective merge in the superclass.
@java.lang.Override
public Builder mergeFrom(org.nd4j.shade.protobuf.Message other) {
if (other instanceof onnx.OnnxMl.AttributeProto) {
return mergeFrom((onnx.OnnxMl.AttributeProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge from another AttributeProto, following the proto3
// merge semantics visible below: scalar/string/bytes fields are copied only
// when |other| holds a non-default value; singular message fields are merged
// recursively; repeated fields are concatenated onto this builder's lists.
public Builder mergeFrom(onnx.OnnxMl.AttributeProto other) {
if (other == onnx.OnnxMl.AttributeProto.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
if (!other.getRefAttrName().isEmpty()) {
refAttrName_ = other.refAttrName_;
onChanged();
}
if (!other.getDocString().isEmpty()) {
docString_ = other.docString_;
onChanged();
}
if (other.type_ != 0) {
setTypeValue(other.getTypeValue());
}
if (other.getF() != 0F) {
setF(other.getF());
}
if (other.getI() != 0L) {
setI(other.getI());
}
if (other.getS() != org.nd4j.shade.protobuf.ByteString.EMPTY) {
setS(other.getS());
}
if (other.hasT()) {
mergeT(other.getT());
}
if (other.hasG()) {
mergeG(other.getG());
}
if (other.hasSparseTensor()) {
mergeSparseTensor(other.getSparseTensor());
}
if (other.hasTp()) {
mergeTp(other.getTp());
}
// Repeated primitive fields: when our list is still empty we adopt
// |other|'s (immutable) list directly and clear the ownership bit;
// otherwise we copy-on-write via ensure*IsMutable() and append.
if (!other.floats_.isEmpty()) {
if (floats_.isEmpty()) {
floats_ = other.floats_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureFloatsIsMutable();
floats_.addAll(other.floats_);
}
onChanged();
}
if (!other.ints_.isEmpty()) {
if (ints_.isEmpty()) {
ints_ = other.ints_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureIntsIsMutable();
ints_.addAll(other.ints_);
}
onChanged();
}
if (!other.strings_.isEmpty()) {
if (strings_.isEmpty()) {
strings_ = other.strings_;
bitField0_ = (bitField0_ & ~0x00000004);
} else {
ensureStringsIsMutable();
strings_.addAll(other.strings_);
}
onChanged();
}
// Repeated message fields: with no nested builder active, merge into the
// plain list exactly like the primitive fields above; with a builder
// active, either adopt |other|'s list wholesale (disposing an empty
// builder first, re-creating it only in always-use-field-builders mode)
// or append through the builder.
if (tensorsBuilder_ == null) {
if (!other.tensors_.isEmpty()) {
if (tensors_.isEmpty()) {
tensors_ = other.tensors_;
bitField0_ = (bitField0_ & ~0x00000008);
} else {
ensureTensorsIsMutable();
tensors_.addAll(other.tensors_);
}
onChanged();
}
} else {
if (!other.tensors_.isEmpty()) {
if (tensorsBuilder_.isEmpty()) {
tensorsBuilder_.dispose();
tensorsBuilder_ = null;
tensors_ = other.tensors_;
bitField0_ = (bitField0_ & ~0x00000008);
tensorsBuilder_ =
org.nd4j.shade.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getTensorsFieldBuilder() : null;
} else {
tensorsBuilder_.addAllMessages(other.tensors_);
}
}
}
if (graphsBuilder_ == null) {
if (!other.graphs_.isEmpty()) {
if (graphs_.isEmpty()) {
graphs_ = other.graphs_;
bitField0_ = (bitField0_ & ~0x00000010);
} else {
ensureGraphsIsMutable();
graphs_.addAll(other.graphs_);
}
onChanged();
}
} else {
if (!other.graphs_.isEmpty()) {
if (graphsBuilder_.isEmpty()) {
graphsBuilder_.dispose();
graphsBuilder_ = null;
graphs_ = other.graphs_;
bitField0_ = (bitField0_ & ~0x00000010);
graphsBuilder_ =
org.nd4j.shade.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getGraphsFieldBuilder() : null;
} else {
graphsBuilder_.addAllMessages(other.graphs_);
}
}
}
if (sparseTensorsBuilder_ == null) {
if (!other.sparseTensors_.isEmpty()) {
if (sparseTensors_.isEmpty()) {
sparseTensors_ = other.sparseTensors_;
bitField0_ = (bitField0_ & ~0x00000020);
} else {
ensureSparseTensorsIsMutable();
sparseTensors_.addAll(other.sparseTensors_);
}
onChanged();
}
} else {
if (!other.sparseTensors_.isEmpty()) {
if (sparseTensorsBuilder_.isEmpty()) {
sparseTensorsBuilder_.dispose();
sparseTensorsBuilder_ = null;
sparseTensors_ = other.sparseTensors_;
bitField0_ = (bitField0_ & ~0x00000020);
sparseTensorsBuilder_ =
org.nd4j.shade.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getSparseTensorsFieldBuilder() : null;
} else {
sparseTensorsBuilder_.addAllMessages(other.sparseTensors_);
}
}
}
if (typeProtosBuilder_ == null) {
if (!other.typeProtos_.isEmpty()) {
if (typeProtos_.isEmpty()) {
typeProtos_ = other.typeProtos_;
bitField0_ = (bitField0_ & ~0x00000040);
} else {
ensureTypeProtosIsMutable();
typeProtos_.addAll(other.typeProtos_);
}
onChanged();
}
} else {
if (!other.typeProtos_.isEmpty()) {
if (typeProtosBuilder_.isEmpty()) {
typeProtosBuilder_.dispose();
typeProtosBuilder_ = null;
typeProtos_ = other.typeProtos_;
bitField0_ = (bitField0_ & ~0x00000040);
typeProtosBuilder_ =
org.nd4j.shade.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getTypeProtosFieldBuilder() : null;
} else {
typeProtosBuilder_.addAllMessages(other.typeProtos_);
}
}
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// Unconditionally true: the generated check has no required fields to verify.
return true;
}
// Parses an AttributeProto from the wire and merges it into this builder.
@java.lang.Override
public Builder mergeFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
onnx.OnnxMl.AttributeProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
// Recover whatever was parsed before the failure, then rethrow the
// underlying IOException.
parsedMessage = (onnx.OnnxMl.AttributeProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
// Runs before any rethrow propagates: partial parse results are still
// merged into this builder even on failure.
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// bitField0_: one bit per repeated field; a set bit means the corresponding
// list is a private mutable copy owned by this builder (see the
// ensure*IsMutable() helpers further below).
private int bitField0_;
// name_ holds either a java.lang.String or a ByteString; the accessors below
// lazily convert between the two and cache the requested representation.
private java.lang.Object name_ = "";
/**
*
* The name field MUST be present for this version of the IR.
*
*
* string name = 1;
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so later calls skip the UTF-8 decode.
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
* The name field MUST be present for this version of the IR.
*
*
* string name = 1;
* @return The bytes for name.
*/
public org.nd4j.shade.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded ByteString for subsequent calls.
name_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
/**
*
* The name field MUST be present for this version of the IR.
*
*
* string name = 1;
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
*
* The name field MUST be present for this version of the IR.
*
*
* string name = 1;
* @return This builder for chaining.
*/
public Builder clearName() {
// Resets to the default instance's value (the empty string for proto3).
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
*
* The name field MUST be present for this version of the IR.
*
*
* string name = 1;
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Reject bytes that are not valid UTF-8 before storing.
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
// refAttrName_ holds either a java.lang.String or a ByteString; the accessors
// below lazily convert and cache the requested representation.
private java.lang.Object refAttrName_ = "";
/**
*
* if ref_attr_name is not empty, ref_attr_name is the attribute name in parent function.
* In this case, this AttributeProto does not contain data, and it's a reference of attribute
* in parent scope.
* NOTE: This should ONLY be used in function (sub-graph). It's invalid to be used in main graph.
*
*
* string ref_attr_name = 21;
* @return The refAttrName.
*/
public java.lang.String getRefAttrName() {
java.lang.Object ref = refAttrName_;
if (!(ref instanceof java.lang.String)) {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so later calls skip the UTF-8 decode.
refAttrName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
* if ref_attr_name is not empty, ref_attr_name is the attribute name in parent function.
* In this case, this AttributeProto does not contain data, and it's a reference of attribute
* in parent scope.
* NOTE: This should ONLY be used in function (sub-graph). It's invalid to be used in main graph.
*
*
* string ref_attr_name = 21;
* @return The bytes for refAttrName.
*/
public org.nd4j.shade.protobuf.ByteString
getRefAttrNameBytes() {
java.lang.Object ref = refAttrName_;
if (ref instanceof String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded ByteString for subsequent calls.
refAttrName_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
/**
*
* if ref_attr_name is not empty, ref_attr_name is the attribute name in parent function.
* In this case, this AttributeProto does not contain data, and it's a reference of attribute
* in parent scope.
* NOTE: This should ONLY be used in function (sub-graph). It's invalid to be used in main graph.
*
*
* string ref_attr_name = 21;
* @param value The refAttrName to set.
* @return This builder for chaining.
*/
public Builder setRefAttrName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
refAttrName_ = value;
onChanged();
return this;
}
/**
*
* if ref_attr_name is not empty, ref_attr_name is the attribute name in parent function.
* In this case, this AttributeProto does not contain data, and it's a reference of attribute
* in parent scope.
* NOTE: This should ONLY be used in function (sub-graph). It's invalid to be used in main graph.
*
*
* string ref_attr_name = 21;
* @return This builder for chaining.
*/
public Builder clearRefAttrName() {
// Resets to the default instance's value (the empty string for proto3).
refAttrName_ = getDefaultInstance().getRefAttrName();
onChanged();
return this;
}
/**
*
* if ref_attr_name is not empty, ref_attr_name is the attribute name in parent function.
* In this case, this AttributeProto does not contain data, and it's a reference of attribute
* in parent scope.
* NOTE: This should ONLY be used in function (sub-graph). It's invalid to be used in main graph.
*
*
* string ref_attr_name = 21;
* @param value The bytes for refAttrName to set.
* @return This builder for chaining.
*/
public Builder setRefAttrNameBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Reject bytes that are not valid UTF-8 before storing.
checkByteStringIsUtf8(value);
refAttrName_ = value;
onChanged();
return this;
}
// docString_ holds either a java.lang.String or a ByteString; the accessors
// below lazily convert and cache the requested representation.
private java.lang.Object docString_ = "";
/**
*
* A human-readable documentation for this attribute. Markdown is allowed.
*
*
* string doc_string = 13;
* @return The docString.
*/
public java.lang.String getDocString() {
java.lang.Object ref = docString_;
if (!(ref instanceof java.lang.String)) {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so later calls skip the UTF-8 decode.
docString_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
* A human-readable documentation for this attribute. Markdown is allowed.
*
*
* string doc_string = 13;
* @return The bytes for docString.
*/
public org.nd4j.shade.protobuf.ByteString
getDocStringBytes() {
java.lang.Object ref = docString_;
if (ref instanceof String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded ByteString for subsequent calls.
docString_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
/**
*
* A human-readable documentation for this attribute. Markdown is allowed.
*
*
* string doc_string = 13;
* @param value The docString to set.
* @return This builder for chaining.
*/
public Builder setDocString(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
docString_ = value;
onChanged();
return this;
}
/**
*
* A human-readable documentation for this attribute. Markdown is allowed.
*
*
* string doc_string = 13;
* @return This builder for chaining.
*/
public Builder clearDocString() {
// Resets to the default instance's value (the empty string for proto3).
docString_ = getDefaultInstance().getDocString();
onChanged();
return this;
}
/**
*
* A human-readable documentation for this attribute. Markdown is allowed.
*
*
* string doc_string = 13;
* @param value The bytes for docString to set.
* @return This builder for chaining.
*/
public Builder setDocStringBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Reject bytes that are not valid UTF-8 before storing.
checkByteStringIsUtf8(value);
docString_ = value;
onChanged();
return this;
}
// type_ stores the raw wire value of the enum so that values unknown to this
// generated code survive a round trip; getType() maps unknown values to
// AttributeType.UNRECOGNIZED.
private int type_ = 0;
/**
*
* The type field MUST be present for this version of the IR.
* For 0.0.1 versions of the IR, this field was not defined, and
* implementations needed to use has_field heuristics to determine
* which value field was in use. For IR_VERSION 0.0.2 or later, this
* field MUST be set and match the f|i|s|t|... field in use. This
* change was made to accommodate proto3 implementations.
*
*
* .onnx.AttributeProto.AttributeType type = 20;
* @return The enum numeric value on the wire for type.
*/
@java.lang.Override public int getTypeValue() {
return type_;
}
/**
*
* The type field MUST be present for this version of the IR.
* For 0.0.1 versions of the IR, this field was not defined, and
* implementations needed to use has_field heuristics to determine
* which value field was in use. For IR_VERSION 0.0.2 or later, this
* field MUST be set and match the f|i|s|t|... field in use. This
* change was made to accommodate proto3 implementations.
*
*
* .onnx.AttributeProto.AttributeType type = 20;
* @param value The enum numeric value on the wire for type to set.
* @return This builder for chaining.
*/
public Builder setTypeValue(int value) {
type_ = value;
onChanged();
return this;
}
/**
*
* The type field MUST be present for this version of the IR.
* For 0.0.1 versions of the IR, this field was not defined, and
* implementations needed to use has_field heuristics to determine
* which value field was in use. For IR_VERSION 0.0.2 or later, this
* field MUST be set and match the f|i|s|t|... field in use. This
* change was made to accommodate proto3 implementations.
*
*
* .onnx.AttributeProto.AttributeType type = 20;
* @return The type.
*/
@java.lang.Override
public onnx.OnnxMl.AttributeProto.AttributeType getType() {
@SuppressWarnings("deprecation")
onnx.OnnxMl.AttributeProto.AttributeType result = onnx.OnnxMl.AttributeProto.AttributeType.valueOf(type_);
// valueOf returns null for wire values this code does not know about.
return result == null ? onnx.OnnxMl.AttributeProto.AttributeType.UNRECOGNIZED : result;
}
/**
*
* The type field MUST be present for this version of the IR.
* For 0.0.1 versions of the IR, this field was not defined, and
* implementations needed to use has_field heuristics to determine
* which value field was in use. For IR_VERSION 0.0.2 or later, this
* field MUST be set and match the f|i|s|t|... field in use. This
* change was made to accommodate proto3 implementations.
*
*
* .onnx.AttributeProto.AttributeType type = 20;
* @param value The type to set.
* @return This builder for chaining.
*/
public Builder setType(onnx.OnnxMl.AttributeProto.AttributeType value) {
if (value == null) {
throw new NullPointerException();
}
type_ = value.getNumber();
onChanged();
return this;
}
/**
*
* The type field MUST be present for this version of the IR.
* For 0.0.1 versions of the IR, this field was not defined, and
* implementations needed to use has_field heuristics to determine
* which value field was in use. For IR_VERSION 0.0.2 or later, this
* field MUST be set and match the f|i|s|t|... field in use. This
* change was made to accommodate proto3 implementations.
*
*
* .onnx.AttributeProto.AttributeType type = 20;
* @return This builder for chaining.
*/
public Builder clearType() {
// 0 is the proto3 default (_START_VERSION-style zero value, UNDEFINED here).
type_ = 0;
onChanged();
return this;
}
// Singular float scalar; proto3 default is 0F.
private float f_ ;
/**
*
* Exactly ONE of the following fields must be present for this version of the IR
*
*
* float f = 2;
* @return The f.
*/
@java.lang.Override
public float getF() {
return f_;
}
/**
*
* Exactly ONE of the following fields must be present for this version of the IR
*
*
* float f = 2;
* @param value The f to set.
* @return This builder for chaining.
*/
public Builder setF(float value) {
f_ = value;
onChanged();
return this;
}
/**
*
* Exactly ONE of the following fields must be present for this version of the IR
*
*
* float f = 2;
* @return This builder for chaining.
*/
public Builder clearF() {
f_ = 0F;
onChanged();
return this;
}
// Singular int64 scalar; proto3 default is 0L.
private long i_ ;
/**
*
* int
*
*
* int64 i = 3;
* @return The i.
*/
@java.lang.Override
public long getI() {
return i_;
}
/**
*
* int
*
*
* int64 i = 3;
* @param value The i to set.
* @return This builder for chaining.
*/
public Builder setI(long value) {
i_ = value;
onChanged();
return this;
}
/**
*
* int
*
*
* int64 i = 3;
* @return This builder for chaining.
*/
public Builder clearI() {
i_ = 0L;
onChanged();
return this;
}
// Singular bytes field; proto3 default is the empty ByteString.
private org.nd4j.shade.protobuf.ByteString s_ = org.nd4j.shade.protobuf.ByteString.EMPTY;
/**
*
* UTF-8 string
*
*
* bytes s = 4;
* @return The s.
*/
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString getS() {
return s_;
}
/**
*
* UTF-8 string
*
*
* bytes s = 4;
* @param value The s to set.
* @return This builder for chaining.
*/
public Builder setS(org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
s_ = value;
onChanged();
return this;
}
/**
*
* UTF-8 string
*
*
* bytes s = 4;
* @return This builder for chaining.
*/
public Builder clearS() {
// Resets to the default instance's value (ByteString.EMPTY).
s_ = getDefaultInstance().getS();
onChanged();
return this;
}
// Singular message field `t`: exactly one of t_ (plain value) or tBuilder_
// (nested field builder) carries the current state; the accessors below keep
// the two representations in sync.
private onnx.OnnxMl.TensorProto t_;
private org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.TensorProto, onnx.OnnxMl.TensorProto.Builder, onnx.OnnxMl.TensorProtoOrBuilder> tBuilder_;
/**
*
* tensor value
*
*
* .onnx.TensorProto t = 5;
* @return Whether the t field is set.
*/
public boolean hasT() {
return tBuilder_ != null || t_ != null;
}
/**
*
* tensor value
*
*
* .onnx.TensorProto t = 5;
* @return The t.
*/
public onnx.OnnxMl.TensorProto getT() {
if (tBuilder_ == null) {
return t_ == null ? onnx.OnnxMl.TensorProto.getDefaultInstance() : t_;
} else {
return tBuilder_.getMessage();
}
}
/**
*
* tensor value
*
*
* .onnx.TensorProto t = 5;
*/
public Builder setT(onnx.OnnxMl.TensorProto value) {
if (tBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
t_ = value;
onChanged();
} else {
tBuilder_.setMessage(value);
}
return this;
}
/**
*
* tensor value
*
*
* .onnx.TensorProto t = 5;
*/
public Builder setT(
onnx.OnnxMl.TensorProto.Builder builderForValue) {
if (tBuilder_ == null) {
t_ = builderForValue.build();
onChanged();
} else {
tBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
* tensor value
*
*
* .onnx.TensorProto t = 5;
*/
public Builder mergeT(onnx.OnnxMl.TensorProto value) {
if (tBuilder_ == null) {
// Merge into the existing value if one is set; otherwise adopt |value|.
if (t_ != null) {
t_ =
onnx.OnnxMl.TensorProto.newBuilder(t_).mergeFrom(value).buildPartial();
} else {
t_ = value;
}
onChanged();
} else {
tBuilder_.mergeFrom(value);
}
return this;
}
/**
*
* tensor value
*
*
* .onnx.TensorProto t = 5;
*/
public Builder clearT() {
if (tBuilder_ == null) {
t_ = null;
onChanged();
} else {
t_ = null;
tBuilder_ = null;
}
return this;
}
/**
*
* tensor value
*
*
* .onnx.TensorProto t = 5;
*/
public onnx.OnnxMl.TensorProto.Builder getTBuilder() {
onChanged();
return getTFieldBuilder().getBuilder();
}
/**
*
* tensor value
*
*
* .onnx.TensorProto t = 5;
*/
public onnx.OnnxMl.TensorProtoOrBuilder getTOrBuilder() {
if (tBuilder_ != null) {
return tBuilder_.getMessageOrBuilder();
} else {
return t_ == null ?
onnx.OnnxMl.TensorProto.getDefaultInstance() : t_;
}
}
/**
*
* tensor value
*
*
* .onnx.TensorProto t = 5;
*/
private org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.TensorProto, onnx.OnnxMl.TensorProto.Builder, onnx.OnnxMl.TensorProtoOrBuilder>
getTFieldBuilder() {
if (tBuilder_ == null) {
// Lazily migrate from the plain t_ value to a field builder; the current
// value seeds the builder and t_ is cleared.
tBuilder_ = new org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.TensorProto, onnx.OnnxMl.TensorProto.Builder, onnx.OnnxMl.TensorProtoOrBuilder>(
getT(),
getParentForChildren(),
isClean());
t_ = null;
}
return tBuilder_;
}
// Singular message field `g`: exactly one of g_ (plain value) or gBuilder_
// (nested field builder) carries the current state; the accessors below keep
// the two representations in sync.
private onnx.OnnxMl.GraphProto g_;
private org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.GraphProto, onnx.OnnxMl.GraphProto.Builder, onnx.OnnxMl.GraphProtoOrBuilder> gBuilder_;
/**
*
* graph
*
*
* .onnx.GraphProto g = 6;
* @return Whether the g field is set.
*/
public boolean hasG() {
return gBuilder_ != null || g_ != null;
}
/**
*
* graph
*
*
* .onnx.GraphProto g = 6;
* @return The g.
*/
public onnx.OnnxMl.GraphProto getG() {
if (gBuilder_ == null) {
return g_ == null ? onnx.OnnxMl.GraphProto.getDefaultInstance() : g_;
} else {
return gBuilder_.getMessage();
}
}
/**
*
* graph
*
*
* .onnx.GraphProto g = 6;
*/
public Builder setG(onnx.OnnxMl.GraphProto value) {
if (gBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
g_ = value;
onChanged();
} else {
gBuilder_.setMessage(value);
}
return this;
}
/**
*
* graph
*
*
* .onnx.GraphProto g = 6;
*/
public Builder setG(
onnx.OnnxMl.GraphProto.Builder builderForValue) {
if (gBuilder_ == null) {
g_ = builderForValue.build();
onChanged();
} else {
gBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
* graph
*
*
* .onnx.GraphProto g = 6;
*/
public Builder mergeG(onnx.OnnxMl.GraphProto value) {
if (gBuilder_ == null) {
// Merge into the existing value if one is set; otherwise adopt |value|.
if (g_ != null) {
g_ =
onnx.OnnxMl.GraphProto.newBuilder(g_).mergeFrom(value).buildPartial();
} else {
g_ = value;
}
onChanged();
} else {
gBuilder_.mergeFrom(value);
}
return this;
}
/**
*
* graph
*
*
* .onnx.GraphProto g = 6;
*/
public Builder clearG() {
if (gBuilder_ == null) {
g_ = null;
onChanged();
} else {
g_ = null;
gBuilder_ = null;
}
return this;
}
/**
*
* graph
*
*
* .onnx.GraphProto g = 6;
*/
public onnx.OnnxMl.GraphProto.Builder getGBuilder() {
onChanged();
return getGFieldBuilder().getBuilder();
}
/**
*
* graph
*
*
* .onnx.GraphProto g = 6;
*/
public onnx.OnnxMl.GraphProtoOrBuilder getGOrBuilder() {
if (gBuilder_ != null) {
return gBuilder_.getMessageOrBuilder();
} else {
return g_ == null ?
onnx.OnnxMl.GraphProto.getDefaultInstance() : g_;
}
}
/**
*
* graph
*
*
* .onnx.GraphProto g = 6;
*/
private org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.GraphProto, onnx.OnnxMl.GraphProto.Builder, onnx.OnnxMl.GraphProtoOrBuilder>
getGFieldBuilder() {
if (gBuilder_ == null) {
// Lazily migrate from the plain g_ value to a field builder; the current
// value seeds the builder and g_ is cleared.
gBuilder_ = new org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.GraphProto, onnx.OnnxMl.GraphProto.Builder, onnx.OnnxMl.GraphProtoOrBuilder>(
getG(),
getParentForChildren(),
isClean());
g_ = null;
}
return gBuilder_;
}
// Singular message field `sparse_tensor`: exactly one of sparseTensor_ (plain
// value) or sparseTensorBuilder_ (nested field builder) carries the current
// state; the accessors below keep the two representations in sync.
private onnx.OnnxMl.SparseTensorProto sparseTensor_;
private org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.SparseTensorProto, onnx.OnnxMl.SparseTensorProto.Builder, onnx.OnnxMl.SparseTensorProtoOrBuilder> sparseTensorBuilder_;
/**
*
* sparse tensor value
*
*
* .onnx.SparseTensorProto sparse_tensor = 22;
* @return Whether the sparseTensor field is set.
*/
public boolean hasSparseTensor() {
return sparseTensorBuilder_ != null || sparseTensor_ != null;
}
/**
*
* sparse tensor value
*
*
* .onnx.SparseTensorProto sparse_tensor = 22;
* @return The sparseTensor.
*/
public onnx.OnnxMl.SparseTensorProto getSparseTensor() {
if (sparseTensorBuilder_ == null) {
return sparseTensor_ == null ? onnx.OnnxMl.SparseTensorProto.getDefaultInstance() : sparseTensor_;
} else {
return sparseTensorBuilder_.getMessage();
}
}
/**
*
* sparse tensor value
*
*
* .onnx.SparseTensorProto sparse_tensor = 22;
*/
public Builder setSparseTensor(onnx.OnnxMl.SparseTensorProto value) {
if (sparseTensorBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
sparseTensor_ = value;
onChanged();
} else {
sparseTensorBuilder_.setMessage(value);
}
return this;
}
/**
*
* sparse tensor value
*
*
* .onnx.SparseTensorProto sparse_tensor = 22;
*/
public Builder setSparseTensor(
onnx.OnnxMl.SparseTensorProto.Builder builderForValue) {
if (sparseTensorBuilder_ == null) {
sparseTensor_ = builderForValue.build();
onChanged();
} else {
sparseTensorBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
* sparse tensor value
*
*
* .onnx.SparseTensorProto sparse_tensor = 22;
*/
public Builder mergeSparseTensor(onnx.OnnxMl.SparseTensorProto value) {
if (sparseTensorBuilder_ == null) {
// Merge into the existing value if one is set; otherwise adopt |value|.
if (sparseTensor_ != null) {
sparseTensor_ =
onnx.OnnxMl.SparseTensorProto.newBuilder(sparseTensor_).mergeFrom(value).buildPartial();
} else {
sparseTensor_ = value;
}
onChanged();
} else {
sparseTensorBuilder_.mergeFrom(value);
}
return this;
}
/**
*
* sparse tensor value
*
*
* .onnx.SparseTensorProto sparse_tensor = 22;
*/
public Builder clearSparseTensor() {
if (sparseTensorBuilder_ == null) {
sparseTensor_ = null;
onChanged();
} else {
sparseTensor_ = null;
sparseTensorBuilder_ = null;
}
return this;
}
/**
*
* sparse tensor value
*
*
* .onnx.SparseTensorProto sparse_tensor = 22;
*/
public onnx.OnnxMl.SparseTensorProto.Builder getSparseTensorBuilder() {
onChanged();
return getSparseTensorFieldBuilder().getBuilder();
}
/**
*
* sparse tensor value
*
*
* .onnx.SparseTensorProto sparse_tensor = 22;
*/
public onnx.OnnxMl.SparseTensorProtoOrBuilder getSparseTensorOrBuilder() {
if (sparseTensorBuilder_ != null) {
return sparseTensorBuilder_.getMessageOrBuilder();
} else {
return sparseTensor_ == null ?
onnx.OnnxMl.SparseTensorProto.getDefaultInstance() : sparseTensor_;
}
}
/**
*
* sparse tensor value
*
*
* .onnx.SparseTensorProto sparse_tensor = 22;
*/
private org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.SparseTensorProto, onnx.OnnxMl.SparseTensorProto.Builder, onnx.OnnxMl.SparseTensorProtoOrBuilder>
getSparseTensorFieldBuilder() {
if (sparseTensorBuilder_ == null) {
// Lazily migrate from the plain value to a field builder; the current
// value seeds the builder and sparseTensor_ is cleared.
sparseTensorBuilder_ = new org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.SparseTensorProto, onnx.OnnxMl.SparseTensorProto.Builder, onnx.OnnxMl.SparseTensorProtoOrBuilder>(
getSparseTensor(),
getParentForChildren(),
isClean());
sparseTensor_ = null;
}
return sparseTensorBuilder_;
}
// Singular message field `tp`: exactly one of tp_ (plain value) or tpBuilder_
// (nested field builder) carries the current state; the accessors below keep
// the two representations in sync.
private onnx.OnnxMl.TypeProto tp_;
private org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.TypeProto, onnx.OnnxMl.TypeProto.Builder, onnx.OnnxMl.TypeProtoOrBuilder> tpBuilder_;
/**
*
* Do not use field below, it's deprecated.
* optional ValueProto v = 12; // value - subsumes everything but graph
*
*
* .onnx.TypeProto tp = 14;
* @return Whether the tp field is set.
*/
public boolean hasTp() {
return tpBuilder_ != null || tp_ != null;
}
/**
*
* Do not use field below, it's deprecated.
* optional ValueProto v = 12; // value - subsumes everything but graph
*
*
* .onnx.TypeProto tp = 14;
* @return The tp.
*/
public onnx.OnnxMl.TypeProto getTp() {
if (tpBuilder_ == null) {
return tp_ == null ? onnx.OnnxMl.TypeProto.getDefaultInstance() : tp_;
} else {
return tpBuilder_.getMessage();
}
}
/**
*
* Do not use field below, it's deprecated.
* optional ValueProto v = 12; // value - subsumes everything but graph
*
*
* .onnx.TypeProto tp = 14;
*/
public Builder setTp(onnx.OnnxMl.TypeProto value) {
if (tpBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
tp_ = value;
onChanged();
} else {
tpBuilder_.setMessage(value);
}
return this;
}
/**
*
* Do not use field below, it's deprecated.
* optional ValueProto v = 12; // value - subsumes everything but graph
*
*
* .onnx.TypeProto tp = 14;
*/
public Builder setTp(
onnx.OnnxMl.TypeProto.Builder builderForValue) {
if (tpBuilder_ == null) {
tp_ = builderForValue.build();
onChanged();
} else {
tpBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
* Do not use field below, it's deprecated.
* optional ValueProto v = 12; // value - subsumes everything but graph
*
*
* .onnx.TypeProto tp = 14;
*/
public Builder mergeTp(onnx.OnnxMl.TypeProto value) {
if (tpBuilder_ == null) {
// Merge into the existing value if one is set; otherwise adopt |value|.
if (tp_ != null) {
tp_ =
onnx.OnnxMl.TypeProto.newBuilder(tp_).mergeFrom(value).buildPartial();
} else {
tp_ = value;
}
onChanged();
} else {
tpBuilder_.mergeFrom(value);
}
return this;
}
/**
*
* Do not use field below, it's deprecated.
* optional ValueProto v = 12; // value - subsumes everything but graph
*
*
* .onnx.TypeProto tp = 14;
*/
public Builder clearTp() {
if (tpBuilder_ == null) {
tp_ = null;
onChanged();
} else {
tp_ = null;
tpBuilder_ = null;
}
return this;
}
/**
*
* Do not use field below, it's deprecated.
* optional ValueProto v = 12; // value - subsumes everything but graph
*
*
* .onnx.TypeProto tp = 14;
*/
public onnx.OnnxMl.TypeProto.Builder getTpBuilder() {
onChanged();
return getTpFieldBuilder().getBuilder();
}
/**
*
* Do not use field below, it's deprecated.
* optional ValueProto v = 12; // value - subsumes everything but graph
*
*
* .onnx.TypeProto tp = 14;
*/
public onnx.OnnxMl.TypeProtoOrBuilder getTpOrBuilder() {
if (tpBuilder_ != null) {
return tpBuilder_.getMessageOrBuilder();
} else {
return tp_ == null ?
onnx.OnnxMl.TypeProto.getDefaultInstance() : tp_;
}
}
/**
*
* Do not use field below, it's deprecated.
* optional ValueProto v = 12; // value - subsumes everything but graph
*
*
* .onnx.TypeProto tp = 14;
*/
private org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.TypeProto, onnx.OnnxMl.TypeProto.Builder, onnx.OnnxMl.TypeProtoOrBuilder>
getTpFieldBuilder() {
if (tpBuilder_ == null) {
// Lazily migrate from the plain tp_ value to a field builder; the current
// value seeds the builder and tp_ is cleared.
tpBuilder_ = new org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.TypeProto, onnx.OnnxMl.TypeProto.Builder, onnx.OnnxMl.TypeProtoOrBuilder>(
getTp(),
getParentForChildren(),
isClean());
tp_ = null;
}
return tpBuilder_;
}
// Repeated float field, stored unboxed. Ownership is copy-on-write: bit
// 0x00000001 of bitField0_ is set only when floats_ is a private mutable
// copy belonging to this builder.
private org.nd4j.shade.protobuf.Internal.FloatList floats_ = emptyFloatList();
// Ensures floats_ is a mutable copy owned by this builder before any write.
private void ensureFloatsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
floats_ = mutableCopy(floats_);
bitField0_ |= 0x00000001;
}
}
/**
*
* list of floats
*
*
* repeated float floats = 7;
* @return A list containing the floats.
*/
public java.util.List
getFloatsList() {
// Wrap in an unmodifiable view only when we own a mutable copy.
return ((bitField0_ & 0x00000001) != 0) ?
java.util.Collections.unmodifiableList(floats_) : floats_;
}
/**
*
* list of floats
*
*
* repeated float floats = 7;
* @return The count of floats.
*/
public int getFloatsCount() {
return floats_.size();
}
/**
*
* list of floats
*
*
* repeated float floats = 7;
* @param index The index of the element to return.
* @return The floats at the given index.
*/
public float getFloats(int index) {
return floats_.getFloat(index);
}
/**
*
* list of floats
*
*
* repeated float floats = 7;
* @param index The index to set the value at.
* @param value The floats to set.
* @return This builder for chaining.
*/
public Builder setFloats(
int index, float value) {
ensureFloatsIsMutable();
floats_.setFloat(index, value);
onChanged();
return this;
}
/**
*
* list of floats
*
*
* repeated float floats = 7;
* @param value The floats to add.
* @return This builder for chaining.
*/
public Builder addFloats(float value) {
ensureFloatsIsMutable();
floats_.addFloat(value);
onChanged();
return this;
}
/**
*
* list of floats
*
*
* repeated float floats = 7;
* @param values The floats to add.
* @return This builder for chaining.
*/
public Builder addAllFloats(
java.lang.Iterable values) {
ensureFloatsIsMutable();
org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
values, floats_);
onChanged();
return this;
}
/**
*
* list of floats
*
*
* repeated float floats = 7;
* @return This builder for chaining.
*/
public Builder clearFloats() {
// Drop back to the shared empty list and release the ownership bit.
floats_ = emptyFloatList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
// Repeated int64 field, stored unboxed. Ownership is copy-on-write: bit
// 0x00000002 of bitField0_ is set only when ints_ is a private mutable copy
// belonging to this builder.
private org.nd4j.shade.protobuf.Internal.LongList ints_ = emptyLongList();
// Ensures ints_ is a mutable copy owned by this builder before any write.
private void ensureIntsIsMutable() {
if (!((bitField0_ & 0x00000002) != 0)) {
ints_ = mutableCopy(ints_);
bitField0_ |= 0x00000002;
}
}
/**
*
* list of ints
*
*
* repeated int64 ints = 8;
* @return A list containing the ints.
*/
public java.util.List
getIntsList() {
// Wrap in an unmodifiable view only when we own a mutable copy.
return ((bitField0_ & 0x00000002) != 0) ?
java.util.Collections.unmodifiableList(ints_) : ints_;
}
/**
*
* list of ints
*
*
* repeated int64 ints = 8;
* @return The count of ints.
*/
public int getIntsCount() {
return ints_.size();
}
/**
*
* list of ints
*
*
* repeated int64 ints = 8;
* @param index The index of the element to return.
* @return The ints at the given index.
*/
public long getInts(int index) {
return ints_.getLong(index);
}
/**
*
* list of ints
*
*
* repeated int64 ints = 8;
* @param index The index to set the value at.
* @param value The ints to set.
* @return This builder for chaining.
*/
public Builder setInts(
int index, long value) {
ensureIntsIsMutable();
ints_.setLong(index, value);
onChanged();
return this;
}
/**
*
* list of ints
*
*
* repeated int64 ints = 8;
* @param value The ints to add.
* @return This builder for chaining.
*/
public Builder addInts(long value) {
ensureIntsIsMutable();
ints_.addLong(value);
onChanged();
return this;
}
/**
*
* list of ints
*
*
* repeated int64 ints = 8;
* @param values The ints to add.
* @return This builder for chaining.
*/
public Builder addAllInts(
java.lang.Iterable values) {
ensureIntsIsMutable();
org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
values, ints_);
onChanged();
return this;
}
/**
*
* list of ints
*
*
* repeated int64 ints = 8;
* @return This builder for chaining.
*/
public Builder clearInts() {
// Drop back to the shared empty list and release the ownership bit.
ints_ = emptyLongList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
private java.util.List<org.nd4j.shade.protobuf.ByteString> strings_ = java.util.Collections.emptyList();
// Copy-on-write guard: bit 0x00000004 of bitField0_ records whether strings_
// is already a private mutable copy owned by this builder.
private void ensureStringsIsMutable() {
  if (!((bitField0_ & 0x00000004) != 0)) {
    strings_ = new java.util.ArrayList<org.nd4j.shade.protobuf.ByteString>(strings_);
    bitField0_ |= 0x00000004;
  }
}
/**
 * <pre>
 * list of UTF-8 strings
 * </pre>
 *
 * <code>repeated bytes strings = 9;</code>
 * @return A list containing the strings.
 */
public java.util.List<org.nd4j.shade.protobuf.ByteString>
    getStringsList() {
  return ((bitField0_ & 0x00000004) != 0) ?
           java.util.Collections.unmodifiableList(strings_) : strings_;
}
/**
 * <pre>
 * list of UTF-8 strings
 * </pre>
 *
 * <code>repeated bytes strings = 9;</code>
 * @return The count of strings.
 */
public int getStringsCount() {
  return strings_.size();
}
/**
 * <pre>
 * list of UTF-8 strings
 * </pre>
 *
 * <code>repeated bytes strings = 9;</code>
 * @param index The index of the element to return.
 * @return The strings at the given index.
 */
public org.nd4j.shade.protobuf.ByteString getStrings(int index) {
  return strings_.get(index);
}
/**
 * <pre>
 * list of UTF-8 strings
 * </pre>
 *
 * <code>repeated bytes strings = 9;</code>
 * @param index The index to set the value at.
 * @param value The strings to set.
 * @return This builder for chaining.
 */
public Builder setStrings(
    int index, org.nd4j.shade.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  ensureStringsIsMutable();
  strings_.set(index, value);
  onChanged();
  return this;
}
/**
 * <pre>
 * list of UTF-8 strings
 * </pre>
 *
 * <code>repeated bytes strings = 9;</code>
 * @param value The strings to add.
 * @return This builder for chaining.
 */
public Builder addStrings(org.nd4j.shade.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  ensureStringsIsMutable();
  strings_.add(value);
  onChanged();
  return this;
}
/**
 * <pre>
 * list of UTF-8 strings
 * </pre>
 *
 * <code>repeated bytes strings = 9;</code>
 * @param values The strings to add.
 * @return This builder for chaining.
 */
public Builder addAllStrings(
    java.lang.Iterable<? extends org.nd4j.shade.protobuf.ByteString> values) {
  ensureStringsIsMutable();
  org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
      values, strings_);
  onChanged();
  return this;
}
/**
 * <pre>
 * list of UTF-8 strings
 * </pre>
 *
 * <code>repeated bytes strings = 9;</code>
 * @return This builder for chaining.
 */
public Builder clearStrings() {
  strings_ = java.util.Collections.emptyList();
  bitField0_ = (bitField0_ & ~0x00000004);
  onChanged();
  return this;
}
private java.util.List<onnx.OnnxMl.TensorProto> tensors_ =
  java.util.Collections.emptyList();
// Copy-on-write guard: bit 0x00000008 of bitField0_ records whether tensors_
// is already a private mutable copy owned by this builder.
private void ensureTensorsIsMutable() {
  if (!((bitField0_ & 0x00000008) != 0)) {
    tensors_ = new java.util.ArrayList<onnx.OnnxMl.TensorProto>(tensors_);
    bitField0_ |= 0x00000008;
  }
}

// Lazily created nested-builder support; once non-null, tensors_ is null and
// all accessors delegate to this field builder instead.
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
    onnx.OnnxMl.TensorProto, onnx.OnnxMl.TensorProto.Builder, onnx.OnnxMl.TensorProtoOrBuilder> tensorsBuilder_;

/**
 * <pre>
 * list of tensors
 * </pre>
 *
 * <code>repeated .onnx.TensorProto tensors = 10;</code>
 */
public java.util.List<onnx.OnnxMl.TensorProto> getTensorsList() {
  if (tensorsBuilder_ == null) {
    return java.util.Collections.unmodifiableList(tensors_);
  } else {
    return tensorsBuilder_.getMessageList();
  }
}
/**
 * <pre>
 * list of tensors
 * </pre>
 *
 * <code>repeated .onnx.TensorProto tensors = 10;</code>
 */
public int getTensorsCount() {
  if (tensorsBuilder_ == null) {
    return tensors_.size();
  } else {
    return tensorsBuilder_.getCount();
  }
}
/**
 * <pre>
 * list of tensors
 * </pre>
 *
 * <code>repeated .onnx.TensorProto tensors = 10;</code>
 */
public onnx.OnnxMl.TensorProto getTensors(int index) {
  if (tensorsBuilder_ == null) {
    return tensors_.get(index);
  } else {
    return tensorsBuilder_.getMessage(index);
  }
}
/**
 * <pre>
 * list of tensors
 * </pre>
 *
 * <code>repeated .onnx.TensorProto tensors = 10;</code>
 */
public Builder setTensors(
    int index, onnx.OnnxMl.TensorProto value) {
  if (tensorsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureTensorsIsMutable();
    tensors_.set(index, value);
    onChanged();
  } else {
    tensorsBuilder_.setMessage(index, value);
  }
  return this;
}
/**
 * <pre>
 * list of tensors
 * </pre>
 *
 * <code>repeated .onnx.TensorProto tensors = 10;</code>
 */
public Builder setTensors(
    int index, onnx.OnnxMl.TensorProto.Builder builderForValue) {
  if (tensorsBuilder_ == null) {
    ensureTensorsIsMutable();
    tensors_.set(index, builderForValue.build());
    onChanged();
  } else {
    tensorsBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * <pre>
 * list of tensors
 * </pre>
 *
 * <code>repeated .onnx.TensorProto tensors = 10;</code>
 */
public Builder addTensors(onnx.OnnxMl.TensorProto value) {
  if (tensorsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureTensorsIsMutable();
    tensors_.add(value);
    onChanged();
  } else {
    tensorsBuilder_.addMessage(value);
  }
  return this;
}
/**
 * <pre>
 * list of tensors
 * </pre>
 *
 * <code>repeated .onnx.TensorProto tensors = 10;</code>
 */
public Builder addTensors(
    int index, onnx.OnnxMl.TensorProto value) {
  if (tensorsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureTensorsIsMutable();
    tensors_.add(index, value);
    onChanged();
  } else {
    tensorsBuilder_.addMessage(index, value);
  }
  return this;
}
/**
 * <pre>
 * list of tensors
 * </pre>
 *
 * <code>repeated .onnx.TensorProto tensors = 10;</code>
 */
public Builder addTensors(
    onnx.OnnxMl.TensorProto.Builder builderForValue) {
  if (tensorsBuilder_ == null) {
    ensureTensorsIsMutable();
    tensors_.add(builderForValue.build());
    onChanged();
  } else {
    tensorsBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
 * <pre>
 * list of tensors
 * </pre>
 *
 * <code>repeated .onnx.TensorProto tensors = 10;</code>
 */
public Builder addTensors(
    int index, onnx.OnnxMl.TensorProto.Builder builderForValue) {
  if (tensorsBuilder_ == null) {
    ensureTensorsIsMutable();
    tensors_.add(index, builderForValue.build());
    onChanged();
  } else {
    tensorsBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * <pre>
 * list of tensors
 * </pre>
 *
 * <code>repeated .onnx.TensorProto tensors = 10;</code>
 */
public Builder addAllTensors(
    java.lang.Iterable<? extends onnx.OnnxMl.TensorProto> values) {
  if (tensorsBuilder_ == null) {
    ensureTensorsIsMutable();
    org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
        values, tensors_);
    onChanged();
  } else {
    tensorsBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 * <pre>
 * list of tensors
 * </pre>
 *
 * <code>repeated .onnx.TensorProto tensors = 10;</code>
 */
public Builder clearTensors() {
  if (tensorsBuilder_ == null) {
    tensors_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000008);
    onChanged();
  } else {
    tensorsBuilder_.clear();
  }
  return this;
}
/**
 * <pre>
 * list of tensors
 * </pre>
 *
 * <code>repeated .onnx.TensorProto tensors = 10;</code>
 */
public Builder removeTensors(int index) {
  if (tensorsBuilder_ == null) {
    ensureTensorsIsMutable();
    tensors_.remove(index);
    onChanged();
  } else {
    tensorsBuilder_.remove(index);
  }
  return this;
}
/**
 * <pre>
 * list of tensors
 * </pre>
 *
 * <code>repeated .onnx.TensorProto tensors = 10;</code>
 */
public onnx.OnnxMl.TensorProto.Builder getTensorsBuilder(
    int index) {
  return getTensorsFieldBuilder().getBuilder(index);
}
/**
 * <pre>
 * list of tensors
 * </pre>
 *
 * <code>repeated .onnx.TensorProto tensors = 10;</code>
 */
public onnx.OnnxMl.TensorProtoOrBuilder getTensorsOrBuilder(
    int index) {
  if (tensorsBuilder_ == null) {
    return tensors_.get(index); } else {
    return tensorsBuilder_.getMessageOrBuilder(index);
  }
}
/**
 * <pre>
 * list of tensors
 * </pre>
 *
 * <code>repeated .onnx.TensorProto tensors = 10;</code>
 */
public java.util.List<? extends onnx.OnnxMl.TensorProtoOrBuilder>
    getTensorsOrBuilderList() {
  if (tensorsBuilder_ != null) {
    return tensorsBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(tensors_);
  }
}
/**
 * <pre>
 * list of tensors
 * </pre>
 *
 * <code>repeated .onnx.TensorProto tensors = 10;</code>
 */
public onnx.OnnxMl.TensorProto.Builder addTensorsBuilder() {
  return getTensorsFieldBuilder().addBuilder(
      onnx.OnnxMl.TensorProto.getDefaultInstance());
}
/**
 * <pre>
 * list of tensors
 * </pre>
 *
 * <code>repeated .onnx.TensorProto tensors = 10;</code>
 */
public onnx.OnnxMl.TensorProto.Builder addTensorsBuilder(
    int index) {
  return getTensorsFieldBuilder().addBuilder(
      index, onnx.OnnxMl.TensorProto.getDefaultInstance());
}
/**
 * <pre>
 * list of tensors
 * </pre>
 *
 * <code>repeated .onnx.TensorProto tensors = 10;</code>
 */
public java.util.List<onnx.OnnxMl.TensorProto.Builder>
    getTensorsBuilderList() {
  return getTensorsFieldBuilder().getBuilderList();
}
// Creates the field builder on first use and hands it ownership of the
// current list contents (tensors_ becomes null from then on).
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
    onnx.OnnxMl.TensorProto, onnx.OnnxMl.TensorProto.Builder, onnx.OnnxMl.TensorProtoOrBuilder>
    getTensorsFieldBuilder() {
  if (tensorsBuilder_ == null) {
    tensorsBuilder_ = new org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
        onnx.OnnxMl.TensorProto, onnx.OnnxMl.TensorProto.Builder, onnx.OnnxMl.TensorProtoOrBuilder>(
            tensors_,
            ((bitField0_ & 0x00000008) != 0),
            getParentForChildren(),
            isClean());
    tensors_ = null;
  }
  return tensorsBuilder_;
}
private java.util.List<onnx.OnnxMl.GraphProto> graphs_ =
  java.util.Collections.emptyList();
// Copy-on-write guard: bit 0x00000010 of bitField0_ records whether graphs_
// is already a private mutable copy owned by this builder.
private void ensureGraphsIsMutable() {
  if (!((bitField0_ & 0x00000010) != 0)) {
    graphs_ = new java.util.ArrayList<onnx.OnnxMl.GraphProto>(graphs_);
    bitField0_ |= 0x00000010;
  }
}

// Lazily created nested-builder support; once non-null, graphs_ is null and
// all accessors delegate to this field builder instead.
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
    onnx.OnnxMl.GraphProto, onnx.OnnxMl.GraphProto.Builder, onnx.OnnxMl.GraphProtoOrBuilder> graphsBuilder_;

/**
 * <pre>
 * list of graph
 * </pre>
 *
 * <code>repeated .onnx.GraphProto graphs = 11;</code>
 */
public java.util.List<onnx.OnnxMl.GraphProto> getGraphsList() {
  if (graphsBuilder_ == null) {
    return java.util.Collections.unmodifiableList(graphs_);
  } else {
    return graphsBuilder_.getMessageList();
  }
}
/**
 * <pre>
 * list of graph
 * </pre>
 *
 * <code>repeated .onnx.GraphProto graphs = 11;</code>
 */
public int getGraphsCount() {
  if (graphsBuilder_ == null) {
    return graphs_.size();
  } else {
    return graphsBuilder_.getCount();
  }
}
/**
 * <pre>
 * list of graph
 * </pre>
 *
 * <code>repeated .onnx.GraphProto graphs = 11;</code>
 */
public onnx.OnnxMl.GraphProto getGraphs(int index) {
  if (graphsBuilder_ == null) {
    return graphs_.get(index);
  } else {
    return graphsBuilder_.getMessage(index);
  }
}
/**
 * <pre>
 * list of graph
 * </pre>
 *
 * <code>repeated .onnx.GraphProto graphs = 11;</code>
 */
public Builder setGraphs(
    int index, onnx.OnnxMl.GraphProto value) {
  if (graphsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureGraphsIsMutable();
    graphs_.set(index, value);
    onChanged();
  } else {
    graphsBuilder_.setMessage(index, value);
  }
  return this;
}
/**
 * <pre>
 * list of graph
 * </pre>
 *
 * <code>repeated .onnx.GraphProto graphs = 11;</code>
 */
public Builder setGraphs(
    int index, onnx.OnnxMl.GraphProto.Builder builderForValue) {
  if (graphsBuilder_ == null) {
    ensureGraphsIsMutable();
    graphs_.set(index, builderForValue.build());
    onChanged();
  } else {
    graphsBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * <pre>
 * list of graph
 * </pre>
 *
 * <code>repeated .onnx.GraphProto graphs = 11;</code>
 */
public Builder addGraphs(onnx.OnnxMl.GraphProto value) {
  if (graphsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureGraphsIsMutable();
    graphs_.add(value);
    onChanged();
  } else {
    graphsBuilder_.addMessage(value);
  }
  return this;
}
/**
 * <pre>
 * list of graph
 * </pre>
 *
 * <code>repeated .onnx.GraphProto graphs = 11;</code>
 */
public Builder addGraphs(
    int index, onnx.OnnxMl.GraphProto value) {
  if (graphsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureGraphsIsMutable();
    graphs_.add(index, value);
    onChanged();
  } else {
    graphsBuilder_.addMessage(index, value);
  }
  return this;
}
/**
 * <pre>
 * list of graph
 * </pre>
 *
 * <code>repeated .onnx.GraphProto graphs = 11;</code>
 */
public Builder addGraphs(
    onnx.OnnxMl.GraphProto.Builder builderForValue) {
  if (graphsBuilder_ == null) {
    ensureGraphsIsMutable();
    graphs_.add(builderForValue.build());
    onChanged();
  } else {
    graphsBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
 * <pre>
 * list of graph
 * </pre>
 *
 * <code>repeated .onnx.GraphProto graphs = 11;</code>
 */
public Builder addGraphs(
    int index, onnx.OnnxMl.GraphProto.Builder builderForValue) {
  if (graphsBuilder_ == null) {
    ensureGraphsIsMutable();
    graphs_.add(index, builderForValue.build());
    onChanged();
  } else {
    graphsBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * <pre>
 * list of graph
 * </pre>
 *
 * <code>repeated .onnx.GraphProto graphs = 11;</code>
 */
public Builder addAllGraphs(
    java.lang.Iterable<? extends onnx.OnnxMl.GraphProto> values) {
  if (graphsBuilder_ == null) {
    ensureGraphsIsMutable();
    org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
        values, graphs_);
    onChanged();
  } else {
    graphsBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 * <pre>
 * list of graph
 * </pre>
 *
 * <code>repeated .onnx.GraphProto graphs = 11;</code>
 */
public Builder clearGraphs() {
  if (graphsBuilder_ == null) {
    graphs_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000010);
    onChanged();
  } else {
    graphsBuilder_.clear();
  }
  return this;
}
/**
 * <pre>
 * list of graph
 * </pre>
 *
 * <code>repeated .onnx.GraphProto graphs = 11;</code>
 */
public Builder removeGraphs(int index) {
  if (graphsBuilder_ == null) {
    ensureGraphsIsMutable();
    graphs_.remove(index);
    onChanged();
  } else {
    graphsBuilder_.remove(index);
  }
  return this;
}
/**
 * <pre>
 * list of graph
 * </pre>
 *
 * <code>repeated .onnx.GraphProto graphs = 11;</code>
 */
public onnx.OnnxMl.GraphProto.Builder getGraphsBuilder(
    int index) {
  return getGraphsFieldBuilder().getBuilder(index);
}
/**
 * <pre>
 * list of graph
 * </pre>
 *
 * <code>repeated .onnx.GraphProto graphs = 11;</code>
 */
public onnx.OnnxMl.GraphProtoOrBuilder getGraphsOrBuilder(
    int index) {
  if (graphsBuilder_ == null) {
    return graphs_.get(index); } else {
    return graphsBuilder_.getMessageOrBuilder(index);
  }
}
/**
 * <pre>
 * list of graph
 * </pre>
 *
 * <code>repeated .onnx.GraphProto graphs = 11;</code>
 */
public java.util.List<? extends onnx.OnnxMl.GraphProtoOrBuilder>
    getGraphsOrBuilderList() {
  if (graphsBuilder_ != null) {
    return graphsBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(graphs_);
  }
}
/**
 * <pre>
 * list of graph
 * </pre>
 *
 * <code>repeated .onnx.GraphProto graphs = 11;</code>
 */
public onnx.OnnxMl.GraphProto.Builder addGraphsBuilder() {
  return getGraphsFieldBuilder().addBuilder(
      onnx.OnnxMl.GraphProto.getDefaultInstance());
}
/**
 * <pre>
 * list of graph
 * </pre>
 *
 * <code>repeated .onnx.GraphProto graphs = 11;</code>
 */
public onnx.OnnxMl.GraphProto.Builder addGraphsBuilder(
    int index) {
  return getGraphsFieldBuilder().addBuilder(
      index, onnx.OnnxMl.GraphProto.getDefaultInstance());
}
/**
 * <pre>
 * list of graph
 * </pre>
 *
 * <code>repeated .onnx.GraphProto graphs = 11;</code>
 */
public java.util.List<onnx.OnnxMl.GraphProto.Builder>
    getGraphsBuilderList() {
  return getGraphsFieldBuilder().getBuilderList();
}
// Creates the field builder on first use and hands it ownership of the
// current list contents (graphs_ becomes null from then on).
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
    onnx.OnnxMl.GraphProto, onnx.OnnxMl.GraphProto.Builder, onnx.OnnxMl.GraphProtoOrBuilder>
    getGraphsFieldBuilder() {
  if (graphsBuilder_ == null) {
    graphsBuilder_ = new org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
        onnx.OnnxMl.GraphProto, onnx.OnnxMl.GraphProto.Builder, onnx.OnnxMl.GraphProtoOrBuilder>(
            graphs_,
            ((bitField0_ & 0x00000010) != 0),
            getParentForChildren(),
            isClean());
    graphs_ = null;
  }
  return graphsBuilder_;
}
private java.util.List<onnx.OnnxMl.SparseTensorProto> sparseTensors_ =
  java.util.Collections.emptyList();
// Copy-on-write guard: bit 0x00000020 of bitField0_ records whether
// sparseTensors_ is already a private mutable copy owned by this builder.
private void ensureSparseTensorsIsMutable() {
  if (!((bitField0_ & 0x00000020) != 0)) {
    sparseTensors_ = new java.util.ArrayList<onnx.OnnxMl.SparseTensorProto>(sparseTensors_);
    bitField0_ |= 0x00000020;
  }
}

// Lazily created nested-builder support; once non-null, sparseTensors_ is
// null and all accessors delegate to this field builder instead.
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
    onnx.OnnxMl.SparseTensorProto, onnx.OnnxMl.SparseTensorProto.Builder, onnx.OnnxMl.SparseTensorProtoOrBuilder> sparseTensorsBuilder_;

/**
 * <pre>
 * list of sparse tensors
 * </pre>
 *
 * <code>repeated .onnx.SparseTensorProto sparse_tensors = 23;</code>
 */
public java.util.List<onnx.OnnxMl.SparseTensorProto> getSparseTensorsList() {
  if (sparseTensorsBuilder_ == null) {
    return java.util.Collections.unmodifiableList(sparseTensors_);
  } else {
    return sparseTensorsBuilder_.getMessageList();
  }
}
/**
 * <pre>
 * list of sparse tensors
 * </pre>
 *
 * <code>repeated .onnx.SparseTensorProto sparse_tensors = 23;</code>
 */
public int getSparseTensorsCount() {
  if (sparseTensorsBuilder_ == null) {
    return sparseTensors_.size();
  } else {
    return sparseTensorsBuilder_.getCount();
  }
}
/**
 * <pre>
 * list of sparse tensors
 * </pre>
 *
 * <code>repeated .onnx.SparseTensorProto sparse_tensors = 23;</code>
 */
public onnx.OnnxMl.SparseTensorProto getSparseTensors(int index) {
  if (sparseTensorsBuilder_ == null) {
    return sparseTensors_.get(index);
  } else {
    return sparseTensorsBuilder_.getMessage(index);
  }
}
/**
 * <pre>
 * list of sparse tensors
 * </pre>
 *
 * <code>repeated .onnx.SparseTensorProto sparse_tensors = 23;</code>
 */
public Builder setSparseTensors(
    int index, onnx.OnnxMl.SparseTensorProto value) {
  if (sparseTensorsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureSparseTensorsIsMutable();
    sparseTensors_.set(index, value);
    onChanged();
  } else {
    sparseTensorsBuilder_.setMessage(index, value);
  }
  return this;
}
/**
 * <pre>
 * list of sparse tensors
 * </pre>
 *
 * <code>repeated .onnx.SparseTensorProto sparse_tensors = 23;</code>
 */
public Builder setSparseTensors(
    int index, onnx.OnnxMl.SparseTensorProto.Builder builderForValue) {
  if (sparseTensorsBuilder_ == null) {
    ensureSparseTensorsIsMutable();
    sparseTensors_.set(index, builderForValue.build());
    onChanged();
  } else {
    sparseTensorsBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * <pre>
 * list of sparse tensors
 * </pre>
 *
 * <code>repeated .onnx.SparseTensorProto sparse_tensors = 23;</code>
 */
public Builder addSparseTensors(onnx.OnnxMl.SparseTensorProto value) {
  if (sparseTensorsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureSparseTensorsIsMutable();
    sparseTensors_.add(value);
    onChanged();
  } else {
    sparseTensorsBuilder_.addMessage(value);
  }
  return this;
}
/**
 * <pre>
 * list of sparse tensors
 * </pre>
 *
 * <code>repeated .onnx.SparseTensorProto sparse_tensors = 23;</code>
 */
public Builder addSparseTensors(
    int index, onnx.OnnxMl.SparseTensorProto value) {
  if (sparseTensorsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureSparseTensorsIsMutable();
    sparseTensors_.add(index, value);
    onChanged();
  } else {
    sparseTensorsBuilder_.addMessage(index, value);
  }
  return this;
}
/**
 * <pre>
 * list of sparse tensors
 * </pre>
 *
 * <code>repeated .onnx.SparseTensorProto sparse_tensors = 23;</code>
 */
public Builder addSparseTensors(
    onnx.OnnxMl.SparseTensorProto.Builder builderForValue) {
  if (sparseTensorsBuilder_ == null) {
    ensureSparseTensorsIsMutable();
    sparseTensors_.add(builderForValue.build());
    onChanged();
  } else {
    sparseTensorsBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
 * <pre>
 * list of sparse tensors
 * </pre>
 *
 * <code>repeated .onnx.SparseTensorProto sparse_tensors = 23;</code>
 */
public Builder addSparseTensors(
    int index, onnx.OnnxMl.SparseTensorProto.Builder builderForValue) {
  if (sparseTensorsBuilder_ == null) {
    ensureSparseTensorsIsMutable();
    sparseTensors_.add(index, builderForValue.build());
    onChanged();
  } else {
    sparseTensorsBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * <pre>
 * list of sparse tensors
 * </pre>
 *
 * <code>repeated .onnx.SparseTensorProto sparse_tensors = 23;</code>
 */
public Builder addAllSparseTensors(
    java.lang.Iterable<? extends onnx.OnnxMl.SparseTensorProto> values) {
  if (sparseTensorsBuilder_ == null) {
    ensureSparseTensorsIsMutable();
    org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
        values, sparseTensors_);
    onChanged();
  } else {
    sparseTensorsBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 * <pre>
 * list of sparse tensors
 * </pre>
 *
 * <code>repeated .onnx.SparseTensorProto sparse_tensors = 23;</code>
 */
public Builder clearSparseTensors() {
  if (sparseTensorsBuilder_ == null) {
    sparseTensors_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000020);
    onChanged();
  } else {
    sparseTensorsBuilder_.clear();
  }
  return this;
}
/**
 * <pre>
 * list of sparse tensors
 * </pre>
 *
 * <code>repeated .onnx.SparseTensorProto sparse_tensors = 23;</code>
 */
public Builder removeSparseTensors(int index) {
  if (sparseTensorsBuilder_ == null) {
    ensureSparseTensorsIsMutable();
    sparseTensors_.remove(index);
    onChanged();
  } else {
    sparseTensorsBuilder_.remove(index);
  }
  return this;
}
/**
 * <pre>
 * list of sparse tensors
 * </pre>
 *
 * <code>repeated .onnx.SparseTensorProto sparse_tensors = 23;</code>
 */
public onnx.OnnxMl.SparseTensorProto.Builder getSparseTensorsBuilder(
    int index) {
  return getSparseTensorsFieldBuilder().getBuilder(index);
}
/**
 * <pre>
 * list of sparse tensors
 * </pre>
 *
 * <code>repeated .onnx.SparseTensorProto sparse_tensors = 23;</code>
 */
public onnx.OnnxMl.SparseTensorProtoOrBuilder getSparseTensorsOrBuilder(
    int index) {
  if (sparseTensorsBuilder_ == null) {
    return sparseTensors_.get(index); } else {
    return sparseTensorsBuilder_.getMessageOrBuilder(index);
  }
}
/**
 * <pre>
 * list of sparse tensors
 * </pre>
 *
 * <code>repeated .onnx.SparseTensorProto sparse_tensors = 23;</code>
 */
public java.util.List<? extends onnx.OnnxMl.SparseTensorProtoOrBuilder>
    getSparseTensorsOrBuilderList() {
  if (sparseTensorsBuilder_ != null) {
    return sparseTensorsBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(sparseTensors_);
  }
}
/**
 * <pre>
 * list of sparse tensors
 * </pre>
 *
 * <code>repeated .onnx.SparseTensorProto sparse_tensors = 23;</code>
 */
public onnx.OnnxMl.SparseTensorProto.Builder addSparseTensorsBuilder() {
  return getSparseTensorsFieldBuilder().addBuilder(
      onnx.OnnxMl.SparseTensorProto.getDefaultInstance());
}
/**
 * <pre>
 * list of sparse tensors
 * </pre>
 *
 * <code>repeated .onnx.SparseTensorProto sparse_tensors = 23;</code>
 */
public onnx.OnnxMl.SparseTensorProto.Builder addSparseTensorsBuilder(
    int index) {
  return getSparseTensorsFieldBuilder().addBuilder(
      index, onnx.OnnxMl.SparseTensorProto.getDefaultInstance());
}
/**
 * <pre>
 * list of sparse tensors
 * </pre>
 *
 * <code>repeated .onnx.SparseTensorProto sparse_tensors = 23;</code>
 */
public java.util.List<onnx.OnnxMl.SparseTensorProto.Builder>
    getSparseTensorsBuilderList() {
  return getSparseTensorsFieldBuilder().getBuilderList();
}
// Creates the field builder on first use and hands it ownership of the
// current list contents (sparseTensors_ becomes null from then on).
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
    onnx.OnnxMl.SparseTensorProto, onnx.OnnxMl.SparseTensorProto.Builder, onnx.OnnxMl.SparseTensorProtoOrBuilder>
    getSparseTensorsFieldBuilder() {
  if (sparseTensorsBuilder_ == null) {
    sparseTensorsBuilder_ = new org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
        onnx.OnnxMl.SparseTensorProto, onnx.OnnxMl.SparseTensorProto.Builder, onnx.OnnxMl.SparseTensorProtoOrBuilder>(
            sparseTensors_,
            ((bitField0_ & 0x00000020) != 0),
            getParentForChildren(),
            isClean());
    sparseTensors_ = null;
  }
  return sparseTensorsBuilder_;
}
private java.util.List<onnx.OnnxMl.TypeProto> typeProtos_ =
  java.util.Collections.emptyList();
// Copy-on-write guard: bit 0x00000040 of bitField0_ records whether
// typeProtos_ is already a private mutable copy owned by this builder.
private void ensureTypeProtosIsMutable() {
  if (!((bitField0_ & 0x00000040) != 0)) {
    typeProtos_ = new java.util.ArrayList<onnx.OnnxMl.TypeProto>(typeProtos_);
    bitField0_ |= 0x00000040;
  }
}

// Lazily created nested-builder support; once non-null, typeProtos_ is null
// and all accessors delegate to this field builder instead.
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
    onnx.OnnxMl.TypeProto, onnx.OnnxMl.TypeProto.Builder, onnx.OnnxMl.TypeProtoOrBuilder> typeProtosBuilder_;

/**
 * <pre>
 * list of type protos
 * </pre>
 *
 * <code>repeated .onnx.TypeProto type_protos = 15;</code>
 */
public java.util.List<onnx.OnnxMl.TypeProto> getTypeProtosList() {
  if (typeProtosBuilder_ == null) {
    return java.util.Collections.unmodifiableList(typeProtos_);
  } else {
    return typeProtosBuilder_.getMessageList();
  }
}
/**
 * <pre>
 * list of type protos
 * </pre>
 *
 * <code>repeated .onnx.TypeProto type_protos = 15;</code>
 */
public int getTypeProtosCount() {
  if (typeProtosBuilder_ == null) {
    return typeProtos_.size();
  } else {
    return typeProtosBuilder_.getCount();
  }
}
/**
 * <pre>
 * list of type protos
 * </pre>
 *
 * <code>repeated .onnx.TypeProto type_protos = 15;</code>
 */
public onnx.OnnxMl.TypeProto getTypeProtos(int index) {
  if (typeProtosBuilder_ == null) {
    return typeProtos_.get(index);
  } else {
    return typeProtosBuilder_.getMessage(index);
  }
}
/**
 * <pre>
 * list of type protos
 * </pre>
 *
 * <code>repeated .onnx.TypeProto type_protos = 15;</code>
 */
public Builder setTypeProtos(
    int index, onnx.OnnxMl.TypeProto value) {
  if (typeProtosBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureTypeProtosIsMutable();
    typeProtos_.set(index, value);
    onChanged();
  } else {
    typeProtosBuilder_.setMessage(index, value);
  }
  return this;
}
/**
 * <pre>
 * list of type protos
 * </pre>
 *
 * <code>repeated .onnx.TypeProto type_protos = 15;</code>
 */
public Builder setTypeProtos(
    int index, onnx.OnnxMl.TypeProto.Builder builderForValue) {
  if (typeProtosBuilder_ == null) {
    ensureTypeProtosIsMutable();
    typeProtos_.set(index, builderForValue.build());
    onChanged();
  } else {
    typeProtosBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * <pre>
 * list of type protos
 * </pre>
 *
 * <code>repeated .onnx.TypeProto type_protos = 15;</code>
 */
public Builder addTypeProtos(onnx.OnnxMl.TypeProto value) {
  if (typeProtosBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureTypeProtosIsMutable();
    typeProtos_.add(value);
    onChanged();
  } else {
    typeProtosBuilder_.addMessage(value);
  }
  return this;
}
/**
 * <pre>
 * list of type protos
 * </pre>
 *
 * <code>repeated .onnx.TypeProto type_protos = 15;</code>
 */
public Builder addTypeProtos(
    int index, onnx.OnnxMl.TypeProto value) {
  if (typeProtosBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureTypeProtosIsMutable();
    typeProtos_.add(index, value);
    onChanged();
  } else {
    typeProtosBuilder_.addMessage(index, value);
  }
  return this;
}
/**
 * <pre>
 * list of type protos
 * </pre>
 *
 * <code>repeated .onnx.TypeProto type_protos = 15;</code>
 */
public Builder addTypeProtos(
    onnx.OnnxMl.TypeProto.Builder builderForValue) {
  if (typeProtosBuilder_ == null) {
    ensureTypeProtosIsMutable();
    typeProtos_.add(builderForValue.build());
    onChanged();
  } else {
    typeProtosBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
 * <pre>
 * list of type protos
 * </pre>
 *
 * <code>repeated .onnx.TypeProto type_protos = 15;</code>
 */
public Builder addTypeProtos(
    int index, onnx.OnnxMl.TypeProto.Builder builderForValue) {
  if (typeProtosBuilder_ == null) {
    ensureTypeProtosIsMutable();
    typeProtos_.add(index, builderForValue.build());
    onChanged();
  } else {
    typeProtosBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * <pre>
 * list of type protos
 * </pre>
 *
 * <code>repeated .onnx.TypeProto type_protos = 15;</code>
 */
public Builder addAllTypeProtos(
    java.lang.Iterable<? extends onnx.OnnxMl.TypeProto> values) {
  if (typeProtosBuilder_ == null) {
    ensureTypeProtosIsMutable();
    org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
        values, typeProtos_);
    onChanged();
  } else {
    typeProtosBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 * <pre>
 * list of type protos
 * </pre>
 *
 * <code>repeated .onnx.TypeProto type_protos = 15;</code>
 */
public Builder clearTypeProtos() {
  if (typeProtosBuilder_ == null) {
    typeProtos_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000040);
    onChanged();
  } else {
    typeProtosBuilder_.clear();
  }
  return this;
}
/**
 * <pre>
 * list of type protos
 * </pre>
 *
 * <code>repeated .onnx.TypeProto type_protos = 15;</code>
 */
public Builder removeTypeProtos(int index) {
  if (typeProtosBuilder_ == null) {
    ensureTypeProtosIsMutable();
    typeProtos_.remove(index);
    onChanged();
  } else {
    typeProtosBuilder_.remove(index);
  }
  return this;
}
/**
 * <pre>
 * list of type protos
 * </pre>
 *
 * <code>repeated .onnx.TypeProto type_protos = 15;</code>
 */
public onnx.OnnxMl.TypeProto.Builder getTypeProtosBuilder(
    int index) {
  return getTypeProtosFieldBuilder().getBuilder(index);
}
/**
 * <pre>
 * list of type protos
 * </pre>
 *
 * <code>repeated .onnx.TypeProto type_protos = 15;</code>
 */
public onnx.OnnxMl.TypeProtoOrBuilder getTypeProtosOrBuilder(
    int index) {
  if (typeProtosBuilder_ == null) {
    return typeProtos_.get(index); } else {
    return typeProtosBuilder_.getMessageOrBuilder(index);
  }
}
/**
 * <pre>
 * list of type protos
 * </pre>
 *
 * <code>repeated .onnx.TypeProto type_protos = 15;</code>
 */
public java.util.List<? extends onnx.OnnxMl.TypeProtoOrBuilder>
    getTypeProtosOrBuilderList() {
  if (typeProtosBuilder_ != null) {
    return typeProtosBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(typeProtos_);
  }
}
/**
 * <pre>
 * list of type protos
 * </pre>
 *
 * <code>repeated .onnx.TypeProto type_protos = 15;</code>
 */
public onnx.OnnxMl.TypeProto.Builder addTypeProtosBuilder() {
  return getTypeProtosFieldBuilder().addBuilder(
      onnx.OnnxMl.TypeProto.getDefaultInstance());
}
/**
 * <pre>
 * list of type protos
 * </pre>
 *
 * <code>repeated .onnx.TypeProto type_protos = 15;</code>
 */
public onnx.OnnxMl.TypeProto.Builder addTypeProtosBuilder(
    int index) {
  return getTypeProtosFieldBuilder().addBuilder(
      index, onnx.OnnxMl.TypeProto.getDefaultInstance());
}
/**
 * <pre>
 * list of type protos
 * </pre>
 *
 * <code>repeated .onnx.TypeProto type_protos = 15;</code>
 */
public java.util.List<onnx.OnnxMl.TypeProto.Builder>
    getTypeProtosBuilderList() {
  return getTypeProtosFieldBuilder().getBuilderList();
}
// Creates the field builder on first use and hands it ownership of the
// current list contents (typeProtos_ becomes null from then on).
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
    onnx.OnnxMl.TypeProto, onnx.OnnxMl.TypeProto.Builder, onnx.OnnxMl.TypeProtoOrBuilder>
    getTypeProtosFieldBuilder() {
  if (typeProtosBuilder_ == null) {
    typeProtosBuilder_ = new org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
        onnx.OnnxMl.TypeProto, onnx.OnnxMl.TypeProto.Builder, onnx.OnnxMl.TypeProtoOrBuilder>(
            typeProtos_,
            ((bitField0_ & 0x00000040) != 0),
            getParentForChildren(),
            isClean());
    typeProtos_ = null;
  }
  return typeProtosBuilder_;
}
// Replaces this builder's unknown-field set wholesale (delegates to the
// GeneratedMessageV3.Builder superclass).
@java.lang.Override
public final Builder setUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
// Merges the supplied unknown fields into this builder's existing set
// (superclass delegation; does not discard previously recorded fields).
@java.lang.Override
public final Builder mergeUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:onnx.AttributeProto)
}
// @@protoc_insertion_point(class_scope:onnx.AttributeProto)
// Shared immutable default (all-fields-unset) instance of AttributeProto,
// created once at class-initialization time.
private static final onnx.OnnxMl.AttributeProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new onnx.OnnxMl.AttributeProto();
}
// Returns the shared default instance; callers must treat it as immutable.
public static onnx.OnnxMl.AttributeProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Singleton wire-format parser for AttributeProto; delegates to the
// stream-reading constructor.
private static final org.nd4j.shade.protobuf.Parser<AttributeProto>
    PARSER = new org.nd4j.shade.protobuf.AbstractParser<AttributeProto>() {
  @java.lang.Override
  public AttributeProto parsePartialFrom(
      org.nd4j.shade.protobuf.CodedInputStream input,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    return new AttributeProto(input, extensionRegistry);
  }
};

/** @return the shared parser for AttributeProto messages. */
public static org.nd4j.shade.protobuf.Parser<AttributeProto> parser() {
  return PARSER;
}

@java.lang.Override
public org.nd4j.shade.protobuf.Parser<AttributeProto> getParserForType() {
  return PARSER;
}

@java.lang.Override
public onnx.OnnxMl.AttributeProto getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
public interface ValueInfoProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:onnx.ValueInfoProto)
org.nd4j.shade.protobuf.MessageOrBuilder {
/**
* <pre>
* This field MUST be present in this version of the IR.
* </pre>
*
* <code>string name = 1;</code>
* @return The name.
*/
java.lang.String getName();
/**
* <pre>
* This field MUST be present in this version of the IR.
* </pre>
*
* <code>string name = 1;</code>
* @return The bytes for name.
*/
org.nd4j.shade.protobuf.ByteString
getNameBytes();
/**
* <pre>
* This field MUST be present in this version of the IR for
* inputs and outputs of the top-level graph.
* </pre>
*
* <code>.onnx.TypeProto type = 2;</code>
* @return Whether the type field is set.
*/
boolean hasType();
/**
* <pre>
* This field MUST be present in this version of the IR for
* inputs and outputs of the top-level graph.
* </pre>
*
* <code>.onnx.TypeProto type = 2;</code>
* @return The type.
*/
onnx.OnnxMl.TypeProto getType();
/**
* <pre>
* This field MUST be present in this version of the IR for
* inputs and outputs of the top-level graph.
* </pre>
*
* <code>.onnx.TypeProto type = 2;</code>
*/
onnx.OnnxMl.TypeProtoOrBuilder getTypeOrBuilder();
/**
* <pre>
* A human-readable documentation for this value. Markdown is allowed.
* </pre>
*
* <code>string doc_string = 3;</code>
* @return The docString.
*/
java.lang.String getDocString();
/**
* <pre>
* A human-readable documentation for this value. Markdown is allowed.
* </pre>
*
* <code>string doc_string = 3;</code>
* @return The bytes for docString.
*/
org.nd4j.shade.protobuf.ByteString
getDocStringBytes();
}
/**
*
* Defines information on value, including the name, the type, and
* the shape of the value.
*
*
* Protobuf type {@code onnx.ValueInfoProto}
*/
// Generated by the protocol buffer compiler from onnx-ml.proto; regenerate rather than edit.
public static final class ValueInfoProto extends
org.nd4j.shade.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:onnx.ValueInfoProto)
ValueInfoProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use ValueInfoProto.newBuilder() to construct.
private ValueInfoProto(org.nd4j.shade.protobuf.GeneratedMessageV3.Builder builder) {
super(builder);
}
// No-arg constructor used for DEFAULT_INSTANCE; string fields start at proto3 default "".
private ValueInfoProto() {
name_ = "";
docString_ = "";
}
// Reflection hook the protobuf runtime uses to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ValueInfoProto();
}
// Exposes fields that were on the wire but are not in this message's schema.
@java.lang.Override
public final org.nd4j.shade.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0),
// preserving unrecognized fields in unknownFields.
private ValueInfoProto(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
org.nd4j.shade.protobuf.UnknownFieldSet.Builder unknownFields =
org.nd4j.shade.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
// tag 10 = field 1 (name), wire type 2 (length-delimited).
case 10: {
java.lang.String s = input.readStringRequireUtf8();
name_ = s;
break;
}
// tag 18 = field 2 (type), embedded message; merge if the field repeats on the wire.
case 18: {
onnx.OnnxMl.TypeProto.Builder subBuilder = null;
if (type_ != null) {
subBuilder = type_.toBuilder();
}
type_ = input.readMessage(onnx.OnnxMl.TypeProto.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(type_);
type_ = subBuilder.buildPartial();
}
break;
}
// tag 26 = field 3 (doc_string), wire type 2 (length-delimited).
case 26: {
java.lang.String s = input.readStringRequireUtf8();
docString_ = s;
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (org.nd4j.shade.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.nd4j.shade.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor accessors backing protobuf reflection for this message type.
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return onnx.OnnxMl.internal_static_onnx_ValueInfoProto_descriptor;
}
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return onnx.OnnxMl.internal_static_onnx_ValueInfoProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
onnx.OnnxMl.ValueInfoProto.class, onnx.OnnxMl.ValueInfoProto.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; the decoded UTF-8 String is cached on first access.
private volatile java.lang.Object name_;
/**
*
* This field MUST be present in this version of the IR.
*
*
* string name = 1;
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
* This field MUST be present in this version of the IR.
*
*
* string name = 1;
* @return The bytes for name.
*/
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
public static final int TYPE_FIELD_NUMBER = 2;
// null means "field not set" (proto3 message presence).
private onnx.OnnxMl.TypeProto type_;
/**
*
* This field MUST be present in this version of the IR for
* inputs and outputs of the top-level graph.
*
*
* .onnx.TypeProto type = 2;
* @return Whether the type field is set.
*/
@java.lang.Override
public boolean hasType() {
return type_ != null;
}
/**
*
* This field MUST be present in this version of the IR for
* inputs and outputs of the top-level graph.
*
*
* .onnx.TypeProto type = 2;
* @return The type.
*/
@java.lang.Override
public onnx.OnnxMl.TypeProto getType() {
return type_ == null ? onnx.OnnxMl.TypeProto.getDefaultInstance() : type_;
}
/**
*
* This field MUST be present in this version of the IR for
* inputs and outputs of the top-level graph.
*
*
* .onnx.TypeProto type = 2;
*/
@java.lang.Override
public onnx.OnnxMl.TypeProtoOrBuilder getTypeOrBuilder() {
return getType();
}
public static final int DOC_STRING_FIELD_NUMBER = 3;
// Same String/ByteString lazy-caching scheme as name_.
private volatile java.lang.Object docString_;
/**
*
* A human-readable documentation for this value. Markdown is allowed.
*
*
* string doc_string = 3;
* @return The docString.
*/
@java.lang.Override
public java.lang.String getDocString() {
java.lang.Object ref = docString_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
docString_ = s;
return s;
}
}
/**
*
* A human-readable documentation for this value. Markdown is allowed.
*
*
* string doc_string = 3;
* @return The bytes for docString.
*/
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString
getDocStringBytes() {
java.lang.Object ref = docString_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
docString_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
// Memoized isInitialized result: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// Always true here: this message declares no required fields.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes only fields that differ from their defaults, then any unknown fields.
@java.lang.Override
public void writeTo(org.nd4j.shade.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (type_ != null) {
output.writeMessage(2, getType());
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(docString_)) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 3, docString_);
}
unknownFields.writeTo(output);
}
// Computes the serialized byte size once and caches it in memoizedSize.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (type_ != null) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(2, getType());
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(docString_)) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(3, docString_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Field-by-field equality, including presence of the type field and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof onnx.OnnxMl.ValueInfoProto)) {
return super.equals(obj);
}
onnx.OnnxMl.ValueInfoProto other = (onnx.OnnxMl.ValueInfoProto) obj;
if (!getName()
.equals(other.getName())) return false;
if (hasType() != other.hasType()) return false;
if (hasType()) {
if (!getType()
.equals(other.getType())) return false;
}
if (!getDocString()
.equals(other.getDocString())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Hash consistent with equals(); memoized after first computation.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
if (hasType()) {
hash = (37 * hash) + TYPE_FIELD_NUMBER;
hash = (53 * hash) + getType().hashCode();
}
hash = (37 * hash) + DOC_STRING_FIELD_NUMBER;
hash = (53 * hash) + getDocString().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom overloads; all delegate to PARSER.
public static onnx.OnnxMl.ValueInfoProto parseFrom(
java.nio.ByteBuffer data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.ValueInfoProto parseFrom(
java.nio.ByteBuffer data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.ValueInfoProto parseFrom(
org.nd4j.shade.protobuf.ByteString data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.ValueInfoProto parseFrom(
org.nd4j.shade.protobuf.ByteString data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.ValueInfoProto parseFrom(byte[] data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.ValueInfoProto parseFrom(
byte[] data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.ValueInfoProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static onnx.OnnxMl.ValueInfoProto parseFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message bytes.
public static onnx.OnnxMl.ValueInfoProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static onnx.OnnxMl.ValueInfoProto parseDelimitedFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static onnx.OnnxMl.ValueInfoProto parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static onnx.OnnxMl.ValueInfoProto parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(onnx.OnnxMl.ValueInfoProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
* Defines information on value, including the name, the type, and
* the shape of the value.
*
*
* Protobuf type {@code onnx.ValueInfoProto}
*/
public static final class Builder extends
org.nd4j.shade.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:onnx.ValueInfoProto)
onnx.OnnxMl.ValueInfoProtoOrBuilder {
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return onnx.OnnxMl.internal_static_onnx_ValueInfoProto_descriptor;
}
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return onnx.OnnxMl.internal_static_onnx_ValueInfoProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
onnx.OnnxMl.ValueInfoProto.class, onnx.OnnxMl.ValueInfoProto.Builder.class);
}
// Construct using onnx.OnnxMl.ValueInfoProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.nd4j.shade.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
// Resets all fields to their proto3 defaults.
@java.lang.Override
public Builder clear() {
super.clear();
name_ = "";
if (typeBuilder_ == null) {
type_ = null;
} else {
type_ = null;
typeBuilder_ = null;
}
docString_ = "";
return this;
}
@java.lang.Override
public org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return onnx.OnnxMl.internal_static_onnx_ValueInfoProto_descriptor;
}
@java.lang.Override
public onnx.OnnxMl.ValueInfoProto getDefaultInstanceForType() {
return onnx.OnnxMl.ValueInfoProto.getDefaultInstance();
}
@java.lang.Override
public onnx.OnnxMl.ValueInfoProto build() {
onnx.OnnxMl.ValueInfoProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new message without the initialization check.
@java.lang.Override
public onnx.OnnxMl.ValueInfoProto buildPartial() {
onnx.OnnxMl.ValueInfoProto result = new onnx.OnnxMl.ValueInfoProto(this);
result.name_ = name_;
if (typeBuilder_ == null) {
result.type_ = type_;
} else {
result.type_ = typeBuilder_.build();
}
result.docString_ = docString_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.nd4j.shade.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatches to the typed mergeFrom when possible; otherwise merges via reflection.
@java.lang.Override
public Builder mergeFrom(org.nd4j.shade.protobuf.Message other) {
if (other instanceof onnx.OnnxMl.ValueInfoProto) {
return mergeFrom((onnx.OnnxMl.ValueInfoProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges set (non-default) fields of other into this builder.
public Builder mergeFrom(onnx.OnnxMl.ValueInfoProto other) {
if (other == onnx.OnnxMl.ValueInfoProto.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
if (other.hasType()) {
mergeType(other.getType());
}
if (!other.getDocString().isEmpty()) {
docString_ = other.docString_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses from a stream and merges the (possibly partial) result, even on error.
@java.lang.Override
public Builder mergeFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
onnx.OnnxMl.ValueInfoProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (onnx.OnnxMl.ValueInfoProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// String or ByteString, same lazy-decode caching as the message field.
private java.lang.Object name_ = "";
/**
*
* This field MUST be present in this version of the IR.
*
*
* string name = 1;
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
* This field MUST be present in this version of the IR.
*
*
* string name = 1;
* @return The bytes for name.
*/
public org.nd4j.shade.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
/**
*
* This field MUST be present in this version of the IR.
*
*
* string name = 1;
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
*
* This field MUST be present in this version of the IR.
*
*
* string name = 1;
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
*
* This field MUST be present in this version of the IR.
*
*
* string name = 1;
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
// type is stored either directly (type_) or via the lazily created typeBuilder_.
private onnx.OnnxMl.TypeProto type_;
private org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.TypeProto, onnx.OnnxMl.TypeProto.Builder, onnx.OnnxMl.TypeProtoOrBuilder> typeBuilder_;
/**
*
* This field MUST be present in this version of the IR for
* inputs and outputs of the top-level graph.
*
*
* .onnx.TypeProto type = 2;
* @return Whether the type field is set.
*/
public boolean hasType() {
return typeBuilder_ != null || type_ != null;
}
/**
*
* This field MUST be present in this version of the IR for
* inputs and outputs of the top-level graph.
*
*
* .onnx.TypeProto type = 2;
* @return The type.
*/
public onnx.OnnxMl.TypeProto getType() {
if (typeBuilder_ == null) {
return type_ == null ? onnx.OnnxMl.TypeProto.getDefaultInstance() : type_;
} else {
return typeBuilder_.getMessage();
}
}
/**
*
* This field MUST be present in this version of the IR for
* inputs and outputs of the top-level graph.
*
*
* .onnx.TypeProto type = 2;
*/
public Builder setType(onnx.OnnxMl.TypeProto value) {
if (typeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
type_ = value;
onChanged();
} else {
typeBuilder_.setMessage(value);
}
return this;
}
/**
*
* This field MUST be present in this version of the IR for
* inputs and outputs of the top-level graph.
*
*
* .onnx.TypeProto type = 2;
*/
public Builder setType(
onnx.OnnxMl.TypeProto.Builder builderForValue) {
if (typeBuilder_ == null) {
type_ = builderForValue.build();
onChanged();
} else {
typeBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
* This field MUST be present in this version of the IR for
* inputs and outputs of the top-level graph.
*
*
* .onnx.TypeProto type = 2;
*/
public Builder mergeType(onnx.OnnxMl.TypeProto value) {
if (typeBuilder_ == null) {
if (type_ != null) {
type_ =
onnx.OnnxMl.TypeProto.newBuilder(type_).mergeFrom(value).buildPartial();
} else {
type_ = value;
}
onChanged();
} else {
typeBuilder_.mergeFrom(value);
}
return this;
}
/**
*
* This field MUST be present in this version of the IR for
* inputs and outputs of the top-level graph.
*
*
* .onnx.TypeProto type = 2;
*/
public Builder clearType() {
if (typeBuilder_ == null) {
type_ = null;
onChanged();
} else {
type_ = null;
typeBuilder_ = null;
}
return this;
}
/**
*
* This field MUST be present in this version of the IR for
* inputs and outputs of the top-level graph.
*
*
* .onnx.TypeProto type = 2;
*/
public onnx.OnnxMl.TypeProto.Builder getTypeBuilder() {
onChanged();
return getTypeFieldBuilder().getBuilder();
}
/**
*
* This field MUST be present in this version of the IR for
* inputs and outputs of the top-level graph.
*
*
* .onnx.TypeProto type = 2;
*/
public onnx.OnnxMl.TypeProtoOrBuilder getTypeOrBuilder() {
if (typeBuilder_ != null) {
return typeBuilder_.getMessageOrBuilder();
} else {
return type_ == null ?
onnx.OnnxMl.TypeProto.getDefaultInstance() : type_;
}
}
/**
*
* This field MUST be present in this version of the IR for
* inputs and outputs of the top-level graph.
*
*
* .onnx.TypeProto type = 2;
*/
private org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.TypeProto, onnx.OnnxMl.TypeProto.Builder, onnx.OnnxMl.TypeProtoOrBuilder>
getTypeFieldBuilder() {
if (typeBuilder_ == null) {
typeBuilder_ = new org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.TypeProto, onnx.OnnxMl.TypeProto.Builder, onnx.OnnxMl.TypeProtoOrBuilder>(
getType(),
getParentForChildren(),
isClean());
type_ = null;
}
return typeBuilder_;
}
// String or ByteString, same lazy-decode caching as the message field.
private java.lang.Object docString_ = "";
/**
*
* A human-readable documentation for this value. Markdown is allowed.
*
*
* string doc_string = 3;
* @return The docString.
*/
public java.lang.String getDocString() {
java.lang.Object ref = docString_;
if (!(ref instanceof java.lang.String)) {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
docString_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
* A human-readable documentation for this value. Markdown is allowed.
*
*
* string doc_string = 3;
* @return The bytes for docString.
*/
public org.nd4j.shade.protobuf.ByteString
getDocStringBytes() {
java.lang.Object ref = docString_;
if (ref instanceof String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
docString_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
/**
*
* A human-readable documentation for this value. Markdown is allowed.
*
*
* string doc_string = 3;
* @param value The docString to set.
* @return This builder for chaining.
*/
public Builder setDocString(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
docString_ = value;
onChanged();
return this;
}
/**
*
* A human-readable documentation for this value. Markdown is allowed.
*
*
* string doc_string = 3;
* @return This builder for chaining.
*/
public Builder clearDocString() {
docString_ = getDefaultInstance().getDocString();
onChanged();
return this;
}
/**
*
* A human-readable documentation for this value. Markdown is allowed.
*
*
* string doc_string = 3;
* @param value The bytes for docString to set.
* @return This builder for chaining.
*/
public Builder setDocStringBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
docString_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:onnx.ValueInfoProto)
}
// @@protoc_insertion_point(class_scope:onnx.ValueInfoProto)
// Singleton default instance: all fields at their proto3 defaults.
private static final onnx.OnnxMl.ValueInfoProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new onnx.OnnxMl.ValueInfoProto();
}
public static onnx.OnnxMl.ValueInfoProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Shared parser delegating to the wire-format parsing constructor above.
private static final org.nd4j.shade.protobuf.Parser
PARSER = new org.nd4j.shade.protobuf.AbstractParser() {
@java.lang.Override
public ValueInfoProto parsePartialFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return new ValueInfoProto(input, extensionRegistry);
}
};
public static org.nd4j.shade.protobuf.Parser parser() {
return PARSER;
}
@java.lang.Override
public org.nd4j.shade.protobuf.Parser getParserForType() {
return PARSER;
}
@java.lang.Override
public onnx.OnnxMl.ValueInfoProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Read-only accessor contract shared by onnx.NodeProto and its Builder
// (generated by the protocol buffer compiler from onnx-ml.proto; do not edit by hand).
public interface NodeProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:onnx.NodeProto)
org.nd4j.shade.protobuf.MessageOrBuilder {
/**
*
* namespace Value
*
*
* repeated string input = 1;
* @return A list containing the input.
*/
java.util.List
getInputList();
/**
*
* namespace Value
*
*
* repeated string input = 1;
* @return The count of input.
*/
int getInputCount();
/**
*
* namespace Value
*
*
* repeated string input = 1;
* @param index The index of the element to return.
* @return The input at the given index.
*/
java.lang.String getInput(int index);
/**
*
* namespace Value
*
*
* repeated string input = 1;
* @param index The index of the value to return.
* @return The bytes of the input at the given index.
*/
org.nd4j.shade.protobuf.ByteString
getInputBytes(int index);
/**
*
* namespace Value
*
*
* repeated string output = 2;
* @return A list containing the output.
*/
java.util.List
getOutputList();
/**
*
* namespace Value
*
*
* repeated string output = 2;
* @return The count of output.
*/
int getOutputCount();
/**
*
* namespace Value
*
*
* repeated string output = 2;
* @param index The index of the element to return.
* @return The output at the given index.
*/
java.lang.String getOutput(int index);
/**
*
* namespace Value
*
*
* repeated string output = 2;
* @param index The index of the value to return.
* @return The bytes of the output at the given index.
*/
org.nd4j.shade.protobuf.ByteString
getOutputBytes(int index);
/**
*
* An optional identifier for this node in a graph.
* This field MAY be absent in ths version of the IR.
*
*
* string name = 3;
* @return The name.
*/
java.lang.String getName();
/**
*
* An optional identifier for this node in a graph.
* This field MAY be absent in ths version of the IR.
*
*
* string name = 3;
* @return The bytes for name.
*/
org.nd4j.shade.protobuf.ByteString
getNameBytes();
/**
*
* The symbolic identifier of the Operator to execute.
*
*
* string op_type = 4;
* @return The opType.
*/
java.lang.String getOpType();
/**
*
* The symbolic identifier of the Operator to execute.
*
*
* string op_type = 4;
* @return The bytes for opType.
*/
org.nd4j.shade.protobuf.ByteString
getOpTypeBytes();
/**
*
* The domain of the OperatorSet that specifies the operator named by op_type.
*
*
* string domain = 7;
* @return The domain.
*/
java.lang.String getDomain();
/**
*
* The domain of the OperatorSet that specifies the operator named by op_type.
*
*
* string domain = 7;
* @return The bytes for domain.
*/
org.nd4j.shade.protobuf.ByteString
getDomainBytes();
/**
*
* Additional named attributes.
*
*
* repeated .onnx.AttributeProto attribute = 5;
*/
java.util.List
getAttributeList();
/**
*
* Additional named attributes.
*
*
* repeated .onnx.AttributeProto attribute = 5;
*/
onnx.OnnxMl.AttributeProto getAttribute(int index);
/**
*
* Additional named attributes.
*
*
* repeated .onnx.AttributeProto attribute = 5;
*/
int getAttributeCount();
/**
*
* Additional named attributes.
*
*
* repeated .onnx.AttributeProto attribute = 5;
*/
java.util.List
getAttributeOrBuilderList();
/**
*
* Additional named attributes.
*
*
* repeated .onnx.AttributeProto attribute = 5;
*/
onnx.OnnxMl.AttributeProtoOrBuilder getAttributeOrBuilder(
int index);
/**
*
* A human-readable documentation for this node. Markdown is allowed.
*
*
* string doc_string = 6;
* @return The docString.
*/
java.lang.String getDocString();
/**
*
* A human-readable documentation for this node. Markdown is allowed.
*
*
* string doc_string = 6;
* @return The bytes for docString.
*/
org.nd4j.shade.protobuf.ByteString
getDocStringBytes();
}
/**
*
* Nodes
* Computation graphs are made up of a DAG of nodes, which represent what is
* commonly called a "layer" or "pipeline stage" in machine learning frameworks.
* For example, it can be a node of type "Conv" that takes in an image, a filter
* tensor and a bias tensor, and produces the convolved output.
*
*
* Protobuf type {@code onnx.NodeProto}
*/
public static final class NodeProto extends
org.nd4j.shade.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:onnx.NodeProto)
NodeProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use NodeProto.newBuilder() to construct.
private NodeProto(org.nd4j.shade.protobuf.GeneratedMessageV3.Builder builder) {
super(builder);
}
private NodeProto() {
input_ = org.nd4j.shade.protobuf.LazyStringArrayList.EMPTY;
output_ = org.nd4j.shade.protobuf.LazyStringArrayList.EMPTY;
name_ = "";
opType_ = "";
domain_ = "";
attribute_ = java.util.Collections.emptyList();
docString_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new NodeProto();
}
@java.lang.Override
public final org.nd4j.shade.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private NodeProto(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.nd4j.shade.protobuf.UnknownFieldSet.Builder unknownFields =
org.nd4j.shade.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
java.lang.String s = input.readStringRequireUtf8();
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
input_ = new org.nd4j.shade.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000001;
}
input_.add(s);
break;
}
case 18: {
java.lang.String s = input.readStringRequireUtf8();
if (!((mutable_bitField0_ & 0x00000002) != 0)) {
output_ = new org.nd4j.shade.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000002;
}
output_.add(s);
break;
}
case 26: {
java.lang.String s = input.readStringRequireUtf8();
name_ = s;
break;
}
case 34: {
java.lang.String s = input.readStringRequireUtf8();
opType_ = s;
break;
}
case 42: {
if (!((mutable_bitField0_ & 0x00000004) != 0)) {
attribute_ = new java.util.ArrayList();
mutable_bitField0_ |= 0x00000004;
}
attribute_.add(
input.readMessage(onnx.OnnxMl.AttributeProto.parser(), extensionRegistry));
break;
}
case 50: {
java.lang.String s = input.readStringRequireUtf8();
docString_ = s;
break;
}
case 58: {
java.lang.String s = input.readStringRequireUtf8();
domain_ = s;
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (org.nd4j.shade.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.nd4j.shade.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) != 0)) {
input_ = input_.getUnmodifiableView();
}
if (((mutable_bitField0_ & 0x00000002) != 0)) {
output_ = output_.getUnmodifiableView();
}
if (((mutable_bitField0_ & 0x00000004) != 0)) {
attribute_ = java.util.Collections.unmodifiableList(attribute_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Protobuf descriptor for onnx.NodeProto, used for reflective field access.
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return onnx.OnnxMl.internal_static_onnx_NodeProto_descriptor;
}
// Binds the descriptor to the NodeProto/Builder classes for reflection support.
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return onnx.OnnxMl.internal_static_onnx_NodeProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
onnx.OnnxMl.NodeProto.class, onnx.OnnxMl.NodeProto.Builder.class);
}
public static final int INPUT_FIELD_NUMBER = 1;
// Node input names; made unmodifiable by the parsing constructor, so it is
// safe to hand out directly from the getters below.
private org.nd4j.shade.protobuf.LazyStringList input_;
/**
*
* namespace Value
*
*
* repeated string input = 1;
* @return A list containing the input.
*/
public org.nd4j.shade.protobuf.ProtocolStringList
getInputList() {
return input_;
}
/**
*
* namespace Value
*
*
* repeated string input = 1;
* @return The count of input.
*/
public int getInputCount() {
return input_.size();
}
/**
*
* namespace Value
*
*
* repeated string input = 1;
* @param index The index of the element to return.
* @return The input at the given index.
*/
public java.lang.String getInput(int index) {
return input_.get(index);
}
/**
*
* namespace Value
*
*
* repeated string input = 1;
* @param index The index of the value to return.
* @return The bytes of the input at the given index.
*/
public org.nd4j.shade.protobuf.ByteString
getInputBytes(int index) {
return input_.getByteString(index);
}
public static final int OUTPUT_FIELD_NUMBER = 2;
// Node output names; made unmodifiable by the parsing constructor, so it is
// safe to hand out directly from the getters below.
private org.nd4j.shade.protobuf.LazyStringList output_;
/**
*
* namespace Value
*
*
* repeated string output = 2;
* @return A list containing the output.
*/
public org.nd4j.shade.protobuf.ProtocolStringList
getOutputList() {
return output_;
}
/**
*
* namespace Value
*
*
* repeated string output = 2;
* @return The count of output.
*/
public int getOutputCount() {
return output_.size();
}
/**
*
* namespace Value
*
*
* repeated string output = 2;
* @param index The index of the element to return.
* @return The output at the given index.
*/
public java.lang.String getOutput(int index) {
return output_.get(index);
}
/**
*
* namespace Value
*
*
* repeated string output = 2;
* @param index The index of the value to return.
* @return The bytes of the output at the given index.
*/
public org.nd4j.shade.protobuf.ByteString
getOutputBytes(int index) {
return output_.getByteString(index);
}
public static final int NAME_FIELD_NUMBER = 3;
// Holds either a java.lang.String or a ByteString; converted lazily in either
// direction and cached (standard generated-code string representation).
private volatile java.lang.Object name_;
/**
*
* An optional identifier for this node in a graph.
* This field MAY be absent in this version of the IR.
*
*
* string name = 3;
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// First String access after parsing: decode the UTF-8 bytes once and cache.
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
* An optional identifier for this node in a graph.
* This field MAY be absent in this version of the IR.
*
*
* string name = 3;
* @return The bytes for name.
*/
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
// Encode once and cache the ByteString form for subsequent calls.
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
public static final int OP_TYPE_FIELD_NUMBER = 4;
// Holds either a java.lang.String or a ByteString; converted lazily and cached.
private volatile java.lang.Object opType_;
/**
*
* The symbolic identifier of the Operator to execute.
*
*
* string op_type = 4;
* @return The opType.
*/
@java.lang.Override
public java.lang.String getOpType() {
java.lang.Object ref = opType_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// First String access after parsing: decode the UTF-8 bytes once and cache.
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
opType_ = s;
return s;
}
}
/**
*
* The symbolic identifier of the Operator to execute.
*
*
* string op_type = 4;
* @return The bytes for opType.
*/
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString
getOpTypeBytes() {
java.lang.Object ref = opType_;
if (ref instanceof java.lang.String) {
// Encode once and cache the ByteString form for subsequent calls.
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
opType_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
public static final int DOMAIN_FIELD_NUMBER = 7;
// Holds either a java.lang.String or a ByteString; converted lazily and cached.
private volatile java.lang.Object domain_;
/**
*
* The domain of the OperatorSet that specifies the operator named by op_type.
*
*
* string domain = 7;
* @return The domain.
*/
@java.lang.Override
public java.lang.String getDomain() {
java.lang.Object ref = domain_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// First String access after parsing: decode the UTF-8 bytes once and cache.
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
domain_ = s;
return s;
}
}
/**
*
* The domain of the OperatorSet that specifies the operator named by op_type.
*
*
* string domain = 7;
* @return The bytes for domain.
*/
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString
getDomainBytes() {
java.lang.Object ref = domain_;
if (ref instanceof java.lang.String) {
// Encode once and cache the ByteString form for subsequent calls.
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
domain_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
public static final int ATTRIBUTE_FIELD_NUMBER = 5;
// Generic type parameters restored: they were stripped by the HTML extraction
// of this source. Raw java.util.List would not compile against the typed
// getAttribute(int) return below (raw List.get returns Object).
// Made unmodifiable by the parsing constructor, so safe to expose directly.
private java.util.List<onnx.OnnxMl.AttributeProto> attribute_;
/**
*
* Additional named attributes.
*
*
* repeated .onnx.AttributeProto attribute = 5;
*/
@java.lang.Override
public java.util.List<onnx.OnnxMl.AttributeProto> getAttributeList() {
return attribute_;
}
/**
*
* Additional named attributes.
*
*
* repeated .onnx.AttributeProto attribute = 5;
*/
@java.lang.Override
public java.util.List<? extends onnx.OnnxMl.AttributeProtoOrBuilder>
getAttributeOrBuilderList() {
return attribute_;
}
/**
*
* Additional named attributes.
*
*
* repeated .onnx.AttributeProto attribute = 5;
*/
@java.lang.Override
public int getAttributeCount() {
return attribute_.size();
}
/**
*
* Additional named attributes.
*
*
* repeated .onnx.AttributeProto attribute = 5;
*/
@java.lang.Override
public onnx.OnnxMl.AttributeProto getAttribute(int index) {
return attribute_.get(index);
}
/**
*
* Additional named attributes.
*
*
* repeated .onnx.AttributeProto attribute = 5;
*/
@java.lang.Override
public onnx.OnnxMl.AttributeProtoOrBuilder getAttributeOrBuilder(
int index) {
return attribute_.get(index);
}
public static final int DOC_STRING_FIELD_NUMBER = 6;
// Holds either a java.lang.String or a ByteString; converted lazily and cached.
private volatile java.lang.Object docString_;
/**
*
* A human-readable documentation for this node. Markdown is allowed.
*
*
* string doc_string = 6;
* @return The docString.
*/
@java.lang.Override
public java.lang.String getDocString() {
java.lang.Object ref = docString_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// First String access after parsing: decode the UTF-8 bytes once and cache.
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
docString_ = s;
return s;
}
}
/**
*
* A human-readable documentation for this node. Markdown is allowed.
*
*
* string doc_string = 6;
* @return The bytes for docString.
*/
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString
getDocStringBytes() {
java.lang.Object ref = docString_;
if (ref instanceof java.lang.String) {
// Encode once and cache the ByteString form for subsequent calls.
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
docString_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
// -1 = not computed yet, 0 = known uninitialized, 1 = known initialized.
private byte memoizedIsInitialized = -1;
// NodeProto declares no required fields, so this always memoizes and returns true.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes this message in field-number order (1..7); proto3 scalar fields
// are skipped when they hold their default (empty string) value.
@java.lang.Override
public void writeTo(org.nd4j.shade.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < input_.size(); i++) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 1, input_.getRaw(i));
}
for (int i = 0; i < output_.size(); i++) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 2, output_.getRaw(i));
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 3, name_);
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(opType_)) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 4, opType_);
}
for (int i = 0; i < attribute_.size(); i++) {
output.writeMessage(5, attribute_.get(i));
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(docString_)) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 6, docString_);
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(domain_)) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 7, domain_);
}
// Preserve any fields that were not recognized at parse time.
unknownFields.writeTo(output);
}
// Computes (and memoizes in memoizedSize; -1 means "not computed") the exact
// wire size of this message. Must mirror writeTo() field-for-field.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < input_.size(); i++) {
dataSize += computeStringSizeNoTag(input_.getRaw(i));
}
size += dataSize;
// One tag byte per element for field number 1.
size += 1 * getInputList().size();
}
{
int dataSize = 0;
for (int i = 0; i < output_.size(); i++) {
dataSize += computeStringSizeNoTag(output_.getRaw(i));
}
size += dataSize;
// One tag byte per element for field number 2.
size += 1 * getOutputList().size();
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(3, name_);
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(opType_)) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(4, opType_);
}
for (int i = 0; i < attribute_.size(); i++) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(5, attribute_.get(i));
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(docString_)) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(6, docString_);
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(domain_)) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(7, domain_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Structural equality over every declared field plus unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof onnx.OnnxMl.NodeProto)) {
return super.equals(obj);
}
onnx.OnnxMl.NodeProto other = (onnx.OnnxMl.NodeProto) obj;
if (!getInputList()
.equals(other.getInputList())) return false;
if (!getOutputList()
.equals(other.getOutputList())) return false;
if (!getName()
.equals(other.getName())) return false;
if (!getOpType()
.equals(other.getOpType())) return false;
if (!getDomain()
.equals(other.getDomain())) return false;
if (!getAttributeList()
.equals(other.getAttributeList())) return false;
if (!getDocString()
.equals(other.getDocString())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Hash over the same fields equals() compares; memoizedHashCode == 0 means
// "not computed yet". Repeated fields only contribute when non-empty so an
// empty list hashes the same as an absent one.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getInputCount() > 0) {
hash = (37 * hash) + INPUT_FIELD_NUMBER;
hash = (53 * hash) + getInputList().hashCode();
}
if (getOutputCount() > 0) {
hash = (37 * hash) + OUTPUT_FIELD_NUMBER;
hash = (53 * hash) + getOutputList().hashCode();
}
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (37 * hash) + OP_TYPE_FIELD_NUMBER;
hash = (53 * hash) + getOpType().hashCode();
hash = (37 * hash) + DOMAIN_FIELD_NUMBER;
hash = (53 * hash) + getDomain().hashCode();
if (getAttributeCount() > 0) {
hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER;
hash = (53 * hash) + getAttributeList().hashCode();
}
hash = (37 * hash) + DOC_STRING_FIELD_NUMBER;
hash = (53 * hash) + getDocString().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points. All delegate to PARSER; the
// InputStream variants route through GeneratedMessageV3 helpers so that
// IOExceptions are surfaced rather than wrapped.
public static onnx.OnnxMl.NodeProto parseFrom(
java.nio.ByteBuffer data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.NodeProto parseFrom(
java.nio.ByteBuffer data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.NodeProto parseFrom(
org.nd4j.shade.protobuf.ByteString data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.NodeProto parseFrom(
org.nd4j.shade.protobuf.ByteString data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.NodeProto parseFrom(byte[] data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.NodeProto parseFrom(
byte[] data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.NodeProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static onnx.OnnxMl.NodeProto parseFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a leading varint length prefix before the message.
public static onnx.OnnxMl.NodeProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static onnx.OnnxMl.NodeProto parseDelimitedFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static onnx.OnnxMl.NodeProto parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static onnx.OnnxMl.NodeProto parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factories. toBuilder() avoids a needless mergeFrom when called on
// the shared default instance.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(onnx.OnnxMl.NodeProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
* Nodes
* Computation graphs are made up of a DAG of nodes, which represent what is
* commonly called a "layer" or "pipeline stage" in machine learning frameworks.
* For example, it can be a node of type "Conv" that takes in an image, a filter
* tensor and a bias tensor, and produces the convolved output.
*
*
* Protobuf type {@code onnx.NodeProto}
*/
public static final class Builder extends
org.nd4j.shade.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:onnx.NodeProto)
onnx.OnnxMl.NodeProtoOrBuilder {
// Same descriptor/accessor-table wiring as the message class; both must refer
// to the identical internal_static_onnx_NodeProto_* singletons.
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return onnx.OnnxMl.internal_static_onnx_NodeProto_descriptor;
}
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return onnx.OnnxMl.internal_static_onnx_NodeProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
onnx.OnnxMl.NodeProto.class, onnx.OnnxMl.NodeProto.Builder.class);
}
// Construct using onnx.OnnxMl.NodeProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
// Parent-aware constructor used for nested-builder change propagation.
private Builder(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the repeated-field builder for `attribute` when the runtime
// is configured to always use field builders (e.g. under a builder parent).
private void maybeForceBuilderInitialization() {
if (org.nd4j.shade.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getAttributeFieldBuilder();
}
}
// Resets every field to its proto3 default. bitField0_ bits 0x1/0x2/0x4 track
// mutable ownership of input_/output_/attribute_ respectively.
@java.lang.Override
public Builder clear() {
super.clear();
input_ = org.nd4j.shade.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
output_ = org.nd4j.shade.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000002);
name_ = "";
opType_ = "";
domain_ = "";
if (attributeBuilder_ == null) {
attribute_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
} else {
// Once a field builder exists it owns the attribute list; clear through it.
attributeBuilder_.clear();
}
docString_ = "";
return this;
}
@java.lang.Override
public org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return onnx.OnnxMl.internal_static_onnx_NodeProto_descriptor;
}
// Shared immutable default NodeProto (all fields at proto3 defaults).
@java.lang.Override
public onnx.OnnxMl.NodeProto getDefaultInstanceForType() {
return onnx.OnnxMl.NodeProto.getDefaultInstance();
}
// Builds and verifies the message; NodeProto has no required fields so the
// isInitialized() check cannot fail in practice.
@java.lang.Override
public onnx.OnnxMl.NodeProto build() {
onnx.OnnxMl.NodeProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Transfers field state into a new message without an initialization check.
// Repeated fields are frozen (unmodifiable view / unmodifiableList) and their
// ownership bit cleared, so later builder mutations copy-on-write instead of
// aliasing the built message.
@java.lang.Override
public onnx.OnnxMl.NodeProto buildPartial() {
onnx.OnnxMl.NodeProto result = new onnx.OnnxMl.NodeProto(this);
// Captured but unused here; artifact of the protobuf code generator.
int from_bitField0_ = bitField0_;
if (((bitField0_ & 0x00000001) != 0)) {
input_ = input_.getUnmodifiableView();
bitField0_ = (bitField0_ & ~0x00000001);
}
result.input_ = input_;
if (((bitField0_ & 0x00000002) != 0)) {
output_ = output_.getUnmodifiableView();
bitField0_ = (bitField0_ & ~0x00000002);
}
result.output_ = output_;
result.name_ = name_;
result.opType_ = opType_;
result.domain_ = domain_;
if (attributeBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)) {
attribute_ = java.util.Collections.unmodifiableList(attribute_);
bitField0_ = (bitField0_ & ~0x00000004);
}
result.attribute_ = attribute_;
} else {
result.attribute_ = attributeBuilder_.build();
}
result.docString_ = docString_;
onBuilt();
return result;
}
// Boilerplate overrides that simply delegate to GeneratedMessageV3.Builder;
// regenerated here only to narrow the return type to this Builder.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.nd4j.shade.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatches to the typed merge when possible; otherwise falls back to the
// reflective, descriptor-based merge in the superclass.
@java.lang.Override
public Builder mergeFrom(org.nd4j.shade.protobuf.Message other) {
if (other instanceof onnx.OnnxMl.NodeProto) {
return mergeFrom((onnx.OnnxMl.NodeProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges `other` into this builder: repeated fields are concatenated, scalar
// string fields are overwritten only when non-empty in `other`.
public Builder mergeFrom(onnx.OnnxMl.NodeProto other) {
if (other == onnx.OnnxMl.NodeProto.getDefaultInstance()) return this;
if (!other.input_.isEmpty()) {
if (input_.isEmpty()) {
// Adopt the other message's (immutable) list directly; the cleared
// ownership bit forces a copy before any local mutation.
input_ = other.input_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureInputIsMutable();
input_.addAll(other.input_);
}
onChanged();
}
if (!other.output_.isEmpty()) {
if (output_.isEmpty()) {
output_ = other.output_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureOutputIsMutable();
output_.addAll(other.output_);
}
onChanged();
}
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
if (!other.getOpType().isEmpty()) {
opType_ = other.opType_;
onChanged();
}
if (!other.getDomain().isEmpty()) {
domain_ = other.domain_;
onChanged();
}
if (attributeBuilder_ == null) {
if (!other.attribute_.isEmpty()) {
if (attribute_.isEmpty()) {
attribute_ = other.attribute_;
bitField0_ = (bitField0_ & ~0x00000004);
} else {
ensureAttributeIsMutable();
attribute_.addAll(other.attribute_);
}
onChanged();
}
} else {
if (!other.attribute_.isEmpty()) {
if (attributeBuilder_.isEmpty()) {
// Drop the empty field builder and adopt the list, then recreate the
// builder if the runtime always uses field builders.
attributeBuilder_.dispose();
attributeBuilder_ = null;
attribute_ = other.attribute_;
bitField0_ = (bitField0_ & ~0x00000004);
attributeBuilder_ =
org.nd4j.shade.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getAttributeFieldBuilder() : null;
} else {
attributeBuilder_.addAllMessages(other.attribute_);
}
}
}
if (!other.getDocString().isEmpty()) {
docString_ = other.docString_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
// No required fields in NodeProto, so a builder is always buildable.
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses a NodeProto from the stream and merges it into this builder. On a
// parse failure, whatever was successfully read (the "unfinished message") is
// still merged in the finally block before the exception propagates.
@java.lang.Override
public Builder mergeFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
onnx.OnnxMl.NodeProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (onnx.OnnxMl.NodeProto) e.getUnfinishedMessage();
// Re-throw the underlying IOException rather than the protobuf wrapper.
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Bit 0x1: this builder owns a mutable input_ list; 0x2: output_; 0x4: attribute_.
private int bitField0_;
private org.nd4j.shade.protobuf.LazyStringList input_ = org.nd4j.shade.protobuf.LazyStringArrayList.EMPTY;
// Copy-on-write: replace a shared/immutable list with a private copy before mutating.
private void ensureInputIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
input_ = new org.nd4j.shade.protobuf.LazyStringArrayList(input_);
bitField0_ |= 0x00000001;
}
}
/**
*
* namespace Value
*
*
* repeated string input = 1;
* @return A list containing the input.
*/
public org.nd4j.shade.protobuf.ProtocolStringList
getInputList() {
// Hand out a read-only view so callers cannot bypass copy-on-write tracking.
return input_.getUnmodifiableView();
}
/**
*
* namespace Value
*
*
* repeated string input = 1;
* @return The count of input.
*/
public int getInputCount() {
return input_.size();
}
/**
*
* namespace Value
*
*
* repeated string input = 1;
* @param index The index of the element to return.
* @return The input at the given index.
*/
public java.lang.String getInput(int index) {
return input_.get(index);
}
/**
*
* namespace Value
*
*
* repeated string input = 1;
* @param index The index of the value to return.
* @return The bytes of the input at the given index.
*/
public org.nd4j.shade.protobuf.ByteString
getInputBytes(int index) {
return input_.getByteString(index);
}
/**
*
* namespace Value
*
*
* repeated string input = 1;
* @param index The index to set the value at.
* @param value The input to set.
* @return This builder for chaining.
*/
public Builder setInput(
int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureInputIsMutable();
input_.set(index, value);
onChanged();
return this;
}
/**
*
* namespace Value
*
*
* repeated string input = 1;
* @param value The input to add.
* @return This builder for chaining.
*/
public Builder addInput(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureInputIsMutable();
input_.add(value);
onChanged();
return this;
}
/**
*
* namespace Value
*
*
* repeated string input = 1;
* @param values The input to add.
* @return This builder for chaining.
*/
public Builder addAllInput(
// Element type restored; it was stripped to a raw Iterable by the HTML
// extraction of this source. Erasure-identical, so callers are unaffected.
java.lang.Iterable<java.lang.String> values) {
ensureInputIsMutable();
org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
values, input_);
onChanged();
return this;
}
/**
*
* namespace Value
*
*
* repeated string input = 1;
* @return This builder for chaining.
*/
public Builder clearInput() {
input_ = org.nd4j.shade.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
* namespace Value
*
*
* repeated string input = 1;
* @param value The bytes of the input to add.
* @return This builder for chaining.
*/
public Builder addInputBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Raw bytes must still be valid UTF-8 for a proto3 string field.
checkByteStringIsUtf8(value);
ensureInputIsMutable();
input_.add(value);
onChanged();
return this;
}
private org.nd4j.shade.protobuf.LazyStringList output_ = org.nd4j.shade.protobuf.LazyStringArrayList.EMPTY;
// Copy-on-write: replace a shared/immutable list with a private copy before mutating.
private void ensureOutputIsMutable() {
if (!((bitField0_ & 0x00000002) != 0)) {
output_ = new org.nd4j.shade.protobuf.LazyStringArrayList(output_);
bitField0_ |= 0x00000002;
}
}
/**
*
* namespace Value
*
*
* repeated string output = 2;
* @return A list containing the output.
*/
public org.nd4j.shade.protobuf.ProtocolStringList
getOutputList() {
// Hand out a read-only view so callers cannot bypass copy-on-write tracking.
return output_.getUnmodifiableView();
}
/**
*
* namespace Value
*
*
* repeated string output = 2;
* @return The count of output.
*/
public int getOutputCount() {
return output_.size();
}
/**
*
* namespace Value
*
*
* repeated string output = 2;
* @param index The index of the element to return.
* @return The output at the given index.
*/
public java.lang.String getOutput(int index) {
return output_.get(index);
}
/**
*
* namespace Value
*
*
* repeated string output = 2;
* @param index The index of the value to return.
* @return The bytes of the output at the given index.
*/
public org.nd4j.shade.protobuf.ByteString
getOutputBytes(int index) {
return output_.getByteString(index);
}
/**
*
* namespace Value
*
*
* repeated string output = 2;
* @param index The index to set the value at.
* @param value The output to set.
* @return This builder for chaining.
*/
public Builder setOutput(
int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureOutputIsMutable();
output_.set(index, value);
onChanged();
return this;
}
/**
*
* namespace Value
*
*
* repeated string output = 2;
* @param value The output to add.
* @return This builder for chaining.
*/
public Builder addOutput(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureOutputIsMutable();
output_.add(value);
onChanged();
return this;
}
/**
*
* namespace Value
*
*
* repeated string output = 2;
* @param values The output to add.
* @return This builder for chaining.
*/
public Builder addAllOutput(
// Element type restored; it was stripped to a raw Iterable by the HTML
// extraction of this source. Erasure-identical, so callers are unaffected.
java.lang.Iterable<java.lang.String> values) {
ensureOutputIsMutable();
org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
values, output_);
onChanged();
return this;
}
/**
*
* namespace Value
*
*
* repeated string output = 2;
* @return This builder for chaining.
*/
public Builder clearOutput() {
output_ = org.nd4j.shade.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
* namespace Value
*
*
* repeated string output = 2;
* @param value The bytes of the output to add.
* @return This builder for chaining.
*/
public Builder addOutputBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Raw bytes must still be valid UTF-8 for a proto3 string field.
checkByteStringIsUtf8(value);
ensureOutputIsMutable();
output_.add(value);
onChanged();
return this;
}
// Holds either a java.lang.String or a ByteString; converted lazily and cached.
private java.lang.Object name_ = "";
/**
*
* An optional identifier for this node in a graph.
* This field MAY be absent in this version of the IR.
*
*
* string name = 3;
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
// Decode the UTF-8 bytes once and cache the String form.
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
* An optional identifier for this node in a graph.
* This field MAY be absent in this version of the IR.
*
*
* string name = 3;
* @return The bytes for name.
*/
public org.nd4j.shade.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
// Encode once and cache the ByteString form.
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
/**
*
* An optional identifier for this node in a graph.
* This field MAY be absent in this version of the IR.
*
*
* string name = 3;
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
*
* An optional identifier for this node in a graph.
* This field MAY be absent in this version of the IR.
*
*
* string name = 3;
* @return This builder for chaining.
*/
public Builder clearName() {
// Reset to the default instance's value (the empty string in proto3).
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
*
* An optional identifier for this node in a graph.
* This field MAY be absent in this version of the IR.
*
*
* string name = 3;
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Raw bytes must still be valid UTF-8 for a proto3 string field.
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
// Holds either a java.lang.String or a ByteString; converted lazily and cached.
private java.lang.Object opType_ = "";
/**
*
* The symbolic identifier of the Operator to execute.
*
*
* string op_type = 4;
* @return The opType.
*/
public java.lang.String getOpType() {
java.lang.Object ref = opType_;
if (!(ref instanceof java.lang.String)) {
// Decode the UTF-8 bytes once and cache the String form.
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
opType_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
* The symbolic identifier of the Operator to execute.
*
*
* string op_type = 4;
* @return The bytes for opType.
*/
public org.nd4j.shade.protobuf.ByteString
getOpTypeBytes() {
java.lang.Object ref = opType_;
if (ref instanceof String) {
// Encode once and cache the ByteString form.
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
opType_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
/**
*
* The symbolic identifier of the Operator to execute.
*
*
* string op_type = 4;
* @param value The opType to set.
* @return This builder for chaining.
*/
public Builder setOpType(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
opType_ = value;
onChanged();
return this;
}
/**
*
* The symbolic identifier of the Operator to execute.
*
*
* string op_type = 4;
* @return This builder for chaining.
*/
public Builder clearOpType() {
// Reset to the default instance's value (the empty string in proto3).
opType_ = getDefaultInstance().getOpType();
onChanged();
return this;
}
/**
*
* The symbolic identifier of the Operator to execute.
*
*
* string op_type = 4;
* @param value The bytes for opType to set.
* @return This builder for chaining.
*/
public Builder setOpTypeBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Raw bytes must still be valid UTF-8 for a proto3 string field.
checkByteStringIsUtf8(value);
opType_ = value;
onChanged();
return this;
}
private java.lang.Object domain_ = "";
/**
*
* The domain of the OperatorSet that specifies the operator named by op_type.
*
*
* string domain = 7;
* @return The domain.
*/
public java.lang.String getDomain() {
java.lang.Object ref = domain_;
if (!(ref instanceof java.lang.String)) {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
domain_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * The domain of the OperatorSet that specifies the operator named by op_type.
 *
 * string domain = 7;
 * @return The bytes for domain.
 */
public org.nd4j.shade.protobuf.ByteString getDomainBytes() {
  java.lang.Object cached = domain_;
  if (!(cached instanceof String)) {
    // Already cached in ByteString form.
    return (org.nd4j.shade.protobuf.ByteString) cached;
  }
  org.nd4j.shade.protobuf.ByteString encoded =
      org.nd4j.shade.protobuf.ByteString.copyFromUtf8((java.lang.String) cached);
  domain_ = encoded;
  return encoded;
}
/**
 * The domain of the OperatorSet that specifies the operator named by op_type.
 *
 * string domain = 7;
 * @param value The domain to set; must be non-null.
 * @return This builder for chaining.
 */
public Builder setDomain(java.lang.String value) {
  java.util.Objects.requireNonNull(value);
  domain_ = value;
  onChanged();
  return this;
}
/**
 * The domain of the OperatorSet that specifies the operator named by op_type.
 *
 * string domain = 7;
 * @return This builder for chaining.
 */
public Builder clearDomain() {
  // Restore the proto3 default (empty string) taken from the default instance.
  domain_ = getDefaultInstance().getDomain();
  onChanged();
  return this;
}
/**
 * The domain of the OperatorSet that specifies the operator named by op_type.
 *
 * string domain = 7;
 * @param value The bytes for domain to set; must be non-null, valid UTF-8.
 * @return This builder for chaining.
 */
public Builder setDomainBytes(org.nd4j.shade.protobuf.ByteString value) {
  java.util.Objects.requireNonNull(value);
  // proto3 string fields must carry valid UTF-8.
  checkByteStringIsUtf8(value);
  domain_ = value;
  onChanged();
  return this;
}
// Backing list for the repeated "attribute" field. Starts as the shared
// immutable empty list and is copied on first mutation (copy-on-write).
// NOTE: the generic type arguments were lost in this copy of the generated
// source (raw List/ArrayList); restored here to match the protobuf codegen.
private java.util.List<onnx.OnnxMl.AttributeProto> attribute_ =
  java.util.Collections.emptyList();
// Copy-on-write guard: bit 0x04 of bitField0_ records whether attribute_
// is already a private mutable copy owned by this builder.
private void ensureAttributeIsMutable() {
  if (!((bitField0_ & 0x00000004) != 0)) {
    attribute_ = new java.util.ArrayList<onnx.OnnxMl.AttributeProto>(attribute_);
    bitField0_ |= 0x00000004;
  }
}
// Lazily-created nested-builder support; non-null once builders are requested,
// after which attribute_ is owned by it (see getAttributeFieldBuilder()).
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
    onnx.OnnxMl.AttributeProto, onnx.OnnxMl.AttributeProto.Builder, onnx.OnnxMl.AttributeProtoOrBuilder> attributeBuilder_;
/**
 * Additional named attributes.
 *
 * repeated .onnx.AttributeProto attribute = 5;
 * @return An unmodifiable view of the current attribute list.
 */
public java.util.List<onnx.OnnxMl.AttributeProto> getAttributeList() {
  // Raw List return type restored to List<AttributeProto> (generics were
  // stripped from this copy of the generated source).
  if (attributeBuilder_ == null) {
    return java.util.Collections.unmodifiableList(attribute_);
  } else {
    return attributeBuilder_.getMessageList();
  }
}
/**
 * Additional named attributes.
 *
 * repeated .onnx.AttributeProto attribute = 5;
 * @return Number of attributes currently held.
 */
public int getAttributeCount() {
  // Delegate to the nested-builder view when it owns the list.
  return attributeBuilder_ == null
      ? attribute_.size()
      : attributeBuilder_.getCount();
}
/**
 * Additional named attributes.
 *
 * repeated .onnx.AttributeProto attribute = 5;
 * @param index Zero-based position of the attribute to fetch.
 */
public onnx.OnnxMl.AttributeProto getAttribute(int index) {
  if (attributeBuilder_ != null) {
    return attributeBuilder_.getMessage(index);
  }
  return attribute_.get(index);
}
/**
 * Additional named attributes.
 *
 * repeated .onnx.AttributeProto attribute = 5;
 * @param index Zero-based position to replace.
 * @param value Replacement attribute; must be non-null.
 * @return This builder for chaining.
 */
public Builder setAttribute(
    int index, onnx.OnnxMl.AttributeProto value) {
  if (attributeBuilder_ != null) {
    attributeBuilder_.setMessage(index, value);
    return this;
  }
  java.util.Objects.requireNonNull(value);
  ensureAttributeIsMutable();
  attribute_.set(index, value);
  onChanged();
  return this;
}
/**
 * Additional named attributes.
 *
 * repeated .onnx.AttributeProto attribute = 5;
 * @param index Zero-based position to replace.
 * @param builderForValue Builder whose built message replaces the element.
 * @return This builder for chaining.
 */
public Builder setAttribute(
    int index, onnx.OnnxMl.AttributeProto.Builder builderForValue) {
  onnx.OnnxMl.AttributeProto built = builderForValue.build();
  if (attributeBuilder_ != null) {
    attributeBuilder_.setMessage(index, built);
    return this;
  }
  ensureAttributeIsMutable();
  attribute_.set(index, built);
  onChanged();
  return this;
}
/**
 * Additional named attributes.
 *
 * repeated .onnx.AttributeProto attribute = 5;
 * @param value Attribute to append; must be non-null.
 * @return This builder for chaining.
 */
public Builder addAttribute(onnx.OnnxMl.AttributeProto value) {
  if (attributeBuilder_ != null) {
    attributeBuilder_.addMessage(value);
    return this;
  }
  java.util.Objects.requireNonNull(value);
  ensureAttributeIsMutable();
  attribute_.add(value);
  onChanged();
  return this;
}
/**
 * Additional named attributes.
 *
 * repeated .onnx.AttributeProto attribute = 5;
 * @param index Zero-based position at which to insert.
 * @param value Attribute to insert; must be non-null.
 * @return This builder for chaining.
 */
public Builder addAttribute(
    int index, onnx.OnnxMl.AttributeProto value) {
  if (attributeBuilder_ != null) {
    attributeBuilder_.addMessage(index, value);
    return this;
  }
  java.util.Objects.requireNonNull(value);
  ensureAttributeIsMutable();
  attribute_.add(index, value);
  onChanged();
  return this;
}
/**
 * Additional named attributes.
 *
 * repeated .onnx.AttributeProto attribute = 5;
 * @param builderForValue Builder whose built message is appended.
 * @return This builder for chaining.
 */
public Builder addAttribute(
    onnx.OnnxMl.AttributeProto.Builder builderForValue) {
  onnx.OnnxMl.AttributeProto built = builderForValue.build();
  if (attributeBuilder_ != null) {
    attributeBuilder_.addMessage(built);
    return this;
  }
  ensureAttributeIsMutable();
  attribute_.add(built);
  onChanged();
  return this;
}
/**
 * Additional named attributes.
 *
 * repeated .onnx.AttributeProto attribute = 5;
 * @param index Zero-based position at which to insert.
 * @param builderForValue Builder whose built message is inserted.
 * @return This builder for chaining.
 */
public Builder addAttribute(
    int index, onnx.OnnxMl.AttributeProto.Builder builderForValue) {
  onnx.OnnxMl.AttributeProto built = builderForValue.build();
  if (attributeBuilder_ != null) {
    attributeBuilder_.addMessage(index, built);
    return this;
  }
  ensureAttributeIsMutable();
  attribute_.add(index, built);
  onChanged();
  return this;
}
/**
 * Additional named attributes.
 *
 * repeated .onnx.AttributeProto attribute = 5;
 * @param values Attributes to append; must be non-null with non-null elements.
 * @return This builder for chaining.
 */
public Builder addAllAttribute(
    java.lang.Iterable<? extends onnx.OnnxMl.AttributeProto> values) {
  // Raw Iterable parameter restored to the generic producer-extends form
  // emitted by the protobuf compiler (generics were stripped from this copy).
  if (attributeBuilder_ == null) {
    ensureAttributeIsMutable();
    org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
        values, attribute_);
    onChanged();
  } else {
    attributeBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 * Additional named attributes.
 *
 * repeated .onnx.AttributeProto attribute = 5;
 * @return This builder for chaining.
 */
public Builder clearAttribute() {
  if (attributeBuilder_ != null) {
    attributeBuilder_.clear();
    return this;
  }
  attribute_ = java.util.Collections.emptyList();
  // Drop the "has private mutable copy" bit so the next write re-copies.
  bitField0_ = (bitField0_ & ~0x00000004);
  onChanged();
  return this;
}
/**
 * Additional named attributes.
 *
 * repeated .onnx.AttributeProto attribute = 5;
 * @param index Zero-based position of the attribute to remove.
 * @return This builder for chaining.
 */
public Builder removeAttribute(int index) {
  if (attributeBuilder_ != null) {
    attributeBuilder_.remove(index);
    return this;
  }
  ensureAttributeIsMutable();
  attribute_.remove(index);
  onChanged();
  return this;
}
/**
 *
 * Additional named attributes.
 *
 *
 * repeated .onnx.AttributeProto attribute = 5;
 */
public onnx.OnnxMl.AttributeProto.Builder getAttributeBuilder(
int index) {
// Forces creation of the nested field builder so the element can be edited in place.
return getAttributeFieldBuilder().getBuilder(index);
}
/**
 * Additional named attributes.
 *
 * repeated .onnx.AttributeProto attribute = 5;
 * @param index Zero-based position of the attribute to view.
 */
public onnx.OnnxMl.AttributeProtoOrBuilder getAttributeOrBuilder(
    int index) {
  if (attributeBuilder_ != null) {
    return attributeBuilder_.getMessageOrBuilder(index);
  }
  return attribute_.get(index);
}
/**
 * Additional named attributes.
 *
 * repeated .onnx.AttributeProto attribute = 5;
 * @return A read-only view of the attributes as message-or-builder handles.
 */
public java.util.List<? extends onnx.OnnxMl.AttributeProtoOrBuilder>
    getAttributeOrBuilderList() {
  // Raw List return restored to the wildcard generic form emitted by the
  // protobuf compiler (generics were stripped from this copy of the source).
  if (attributeBuilder_ != null) {
    return attributeBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(attribute_);
  }
}
/**
 *
 * Additional named attributes.
 *
 *
 * repeated .onnx.AttributeProto attribute = 5;
 */
public onnx.OnnxMl.AttributeProto.Builder addAttributeBuilder() {
// Appends a default-valued AttributeProto and returns its builder for in-place editing.
return getAttributeFieldBuilder().addBuilder(
onnx.OnnxMl.AttributeProto.getDefaultInstance());
}
/**
 *
 * Additional named attributes.
 *
 *
 * repeated .onnx.AttributeProto attribute = 5;
 */
public onnx.OnnxMl.AttributeProto.Builder addAttributeBuilder(
int index) {
// Inserts a default-valued AttributeProto at index and returns its builder.
return getAttributeFieldBuilder().addBuilder(
index, onnx.OnnxMl.AttributeProto.getDefaultInstance());
}
/**
 * Additional named attributes.
 *
 * repeated .onnx.AttributeProto attribute = 5;
 * @return Builders for every attribute, forcing nested-builder mode.
 */
public java.util.List<onnx.OnnxMl.AttributeProto.Builder>
    getAttributeBuilderList() {
  // Raw List return restored to List<AttributeProto.Builder> (generics were
  // stripped from this copy of the generated source).
  return getAttributeFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 on first use, seeding it with the
// current list contents and transferring ownership: attribute_ is nulled out
// afterwards, so all access must then go through attributeBuilder_.
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
onnx.OnnxMl.AttributeProto, onnx.OnnxMl.AttributeProto.Builder, onnx.OnnxMl.AttributeProtoOrBuilder>
getAttributeFieldBuilder() {
if (attributeBuilder_ == null) {
attributeBuilder_ = new org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
onnx.OnnxMl.AttributeProto, onnx.OnnxMl.AttributeProto.Builder, onnx.OnnxMl.AttributeProtoOrBuilder>(
attribute_,
((bitField0_ & 0x00000004) != 0),
getParentForChildren(),
isClean());
attribute_ = null;
}
return attributeBuilder_;
}
// Holds either a String or a ByteString; converted lazily in both directions.
private java.lang.Object docString_ = "";
/**
 * A human-readable documentation for this node. Markdown is allowed.
 *
 * string doc_string = 6;
 * @return The docString.
 */
public java.lang.String getDocString() {
  java.lang.Object cached = docString_;
  if (cached instanceof java.lang.String) {
    return (java.lang.String) cached;
  }
  // Decode the cached ByteString once and memoize the String form.
  java.lang.String decoded =
      ((org.nd4j.shade.protobuf.ByteString) cached).toStringUtf8();
  docString_ = decoded;
  return decoded;
}
/**
 * A human-readable documentation for this node. Markdown is allowed.
 *
 * string doc_string = 6;
 * @return The bytes for docString.
 */
public org.nd4j.shade.protobuf.ByteString getDocStringBytes() {
  java.lang.Object cached = docString_;
  if (!(cached instanceof String)) {
    // Already cached in ByteString form.
    return (org.nd4j.shade.protobuf.ByteString) cached;
  }
  org.nd4j.shade.protobuf.ByteString encoded =
      org.nd4j.shade.protobuf.ByteString.copyFromUtf8((java.lang.String) cached);
  docString_ = encoded;
  return encoded;
}
/**
 * A human-readable documentation for this node. Markdown is allowed.
 *
 * string doc_string = 6;
 * @param value The docString to set; must be non-null.
 * @return This builder for chaining.
 */
public Builder setDocString(java.lang.String value) {
  java.util.Objects.requireNonNull(value);
  docString_ = value;
  onChanged();
  return this;
}
/**
 * A human-readable documentation for this node. Markdown is allowed.
 *
 * string doc_string = 6;
 * @return This builder for chaining.
 */
public Builder clearDocString() {
  // Restore the proto3 default (empty string) taken from the default instance.
  docString_ = getDefaultInstance().getDocString();
  onChanged();
  return this;
}
/**
 * A human-readable documentation for this node. Markdown is allowed.
 *
 * string doc_string = 6;
 * @param value The bytes for docString to set; must be non-null, valid UTF-8.
 * @return This builder for chaining.
 */
public Builder setDocStringBytes(org.nd4j.shade.protobuf.ByteString value) {
  java.util.Objects.requireNonNull(value);
  // proto3 string fields must carry valid UTF-8.
  checkByteStringIsUtf8(value);
  docString_ = value;
  onChanged();
  return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
// Replaces (not merges) the fields that were not recognized at parse time.
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
// Merges the given unrecognized fields into those already carried.
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:onnx.NodeProto)
}
// @@protoc_insertion_point(class_scope:onnx.NodeProto)
// Shared singleton: the immutable, all-fields-unset NodeProto.
private static final onnx.OnnxMl.NodeProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new onnx.OnnxMl.NodeProto();
}
/** Returns the shared immutable default (all-fields-unset) NodeProto. */
public static onnx.OnnxMl.NodeProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
/**
 * Shared parser for NodeProto messages. Type arguments restored — this copy
 * of the generated source had lost them, leaving raw Parser/AbstractParser.
 */
private static final org.nd4j.shade.protobuf.Parser<NodeProto>
    PARSER = new org.nd4j.shade.protobuf.AbstractParser<NodeProto>() {
  @java.lang.Override
  public NodeProto parsePartialFrom(
      org.nd4j.shade.protobuf.CodedInputStream input,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    // "Partial" parse: required-field validation is deferred to the caller.
    return new NodeProto(input, extensionRegistry);
  }
};
/** Returns the shared parser for NodeProto (raw return type re-parameterized). */
public static org.nd4j.shade.protobuf.Parser<NodeProto> parser() {
  return PARSER;
}
/** Returns the parser for this message type (raw return type re-parameterized). */
@java.lang.Override
public org.nd4j.shade.protobuf.Parser<NodeProto> getParserForType() {
  return PARSER;
}
@java.lang.Override
public onnx.OnnxMl.NodeProto getDefaultInstanceForType() {
// Instance-level accessor for the shared default; required by MessageOrBuilder.
return DEFAULT_INSTANCE;
}
}
public interface TrainingInfoProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:onnx.TrainingInfoProto)
    org.nd4j.shade.protobuf.MessageOrBuilder {
  /**
   * Graph executed to (re)compute initial tensors before training starts.
   * It has no inputs and may have multiple outputs (e.g. RandomNormal /
   * RandomUniform nodes whose outputs are assigned to specific initializers
   * via "initialization_binding"); it may also reset initializers of the
   * "algorithm" graph in this same TrainingInfoProto (e.g. zeroing the
   * iteration count). Empty by default, in which case its evaluation
   * produces no output and changes no initializer.
   *
   * .onnx.GraphProto initialization = 1;
   * @return Whether the initialization field is set.
   */
  boolean hasInitialization();
  /**
   * See {@link #hasInitialization()} for field semantics.
   *
   * .onnx.GraphProto initialization = 1;
   * @return The initialization graph (default instance when unset).
   */
  onnx.OnnxMl.GraphProto getInitialization();
  /**
   * See {@link #hasInitialization()} for field semantics.
   *
   * .onnx.GraphProto initialization = 1;
   */
  onnx.OnnxMl.GraphProtoOrBuilder getInitializationOrBuilder();
  /**
   * Graph representing one training-algorithm step (typically loss, gradient,
   * optimizer nodes and an iteration-count increment). A step is executed on
   * the graph formed by concatenating the inference graph ("ModelProto.graph")
   * and this "algorithm" graph: their
   * input/initializer/output/node/value_info/sparse_initializer lists are
   * concatenated in that order, and the combined graph must satisfy normal
   * ONNX conditions. "algorithm" nodes may reference outputs of inference
   * nodes, but never the other way round, so the inference graph stays
   * runnable without any training information. Empty by default; the default
   * step produces no output and never updates any initializer.
   *
   * .onnx.GraphProto algorithm = 2;
   * @return Whether the algorithm field is set.
   */
  boolean hasAlgorithm();
  /**
   * See {@link #hasAlgorithm()} for field semantics.
   *
   * .onnx.GraphProto algorithm = 2;
   * @return The algorithm graph (default instance when unset).
   */
  onnx.OnnxMl.GraphProto getAlgorithm();
  /**
   * See {@link #hasAlgorithm()} for field semantics.
   *
   * .onnx.GraphProto algorithm = 2;
   */
  onnx.OnnxMl.GraphProtoOrBuilder getAlgorithmOrBuilder();
  /**
   * Bindings from outputs of "initialization" to initializers in
   * "ModelProto.graph.initializer" or "algorithm.initializer" of this same
   * TrainingInfoProto; see "update_binding" for the binding rules.
   * Empty by default, so running "initialization" changes no initializer.
   *
   * repeated .onnx.StringStringEntryProto initialization_binding = 3;
   */
  java.util.List<onnx.OnnxMl.StringStringEntryProto>
      getInitializationBindingList();
  /** repeated .onnx.StringStringEntryProto initialization_binding = 3; */
  onnx.OnnxMl.StringStringEntryProto getInitializationBinding(int index);
  /** repeated .onnx.StringStringEntryProto initialization_binding = 3; */
  int getInitializationBindingCount();
  /** repeated .onnx.StringStringEntryProto initialization_binding = 3; */
  java.util.List<? extends onnx.OnnxMl.StringStringEntryProtoOrBuilder>
      getInitializationBindingOrBuilderList();
  /** repeated .onnx.StringStringEntryProto initialization_binding = 3; */
  onnx.OnnxMl.StringStringEntryProtoOrBuilder getInitializationBindingOrBuilder(
      int index);
  /**
   * Key/value pairs assigning an "algorithm" (or ModelProto.graph) output
   * (value) back to an initializer (key) after each training step; e.g. for
   * the update y = x - r * g, key "x" is bound to value "y". Initializers
   * appearing as keys are the mutable variables of the model:
   * 1. Keys are unique across all "update_binding"s, so a variable is
   *    assigned at most once per step.
   * 2. Keys must name initializers of "ModelProto.graph" or
   *    "TrainingInfoProto.algorithm".
   * 3. Values must be output names of "algorithm" or "ModelProto.graph".
   * 4. Variables start at their initializer's value and are then updated by
   *    "initialization_binding"s and "update_binding"s.
   * Typically holds trainable tensors, optimizer state (momentums), and the
   * iteration counter. Empty by default, so running "algorithm" changes no
   * initializer.
   *
   * repeated .onnx.StringStringEntryProto update_binding = 4;
   */
  java.util.List<onnx.OnnxMl.StringStringEntryProto>
      getUpdateBindingList();
  /** repeated .onnx.StringStringEntryProto update_binding = 4; */
  onnx.OnnxMl.StringStringEntryProto getUpdateBinding(int index);
  /** repeated .onnx.StringStringEntryProto update_binding = 4; */
  int getUpdateBindingCount();
  /** repeated .onnx.StringStringEntryProto update_binding = 4; */
  java.util.List<? extends onnx.OnnxMl.StringStringEntryProtoOrBuilder>
      getUpdateBindingOrBuilderList();
  /** repeated .onnx.StringStringEntryProto update_binding = 4; */
  onnx.OnnxMl.StringStringEntryProtoOrBuilder getUpdateBindingOrBuilder(
      int index);
}
/**
*
* Training information
* TrainingInfoProto stores information for training a model.
* In particular, this defines two functionalities: an initialization-step
* and a training-algorithm-step. Initialization resets the model
* back to its original state as if no training has been performed.
* Training algorithm improves the model based on input data.
* The semantics of the initialization-step is that the initializers
* in ModelProto.graph and in TrainingInfoProto.algorithm are first
* initialized as specified by the initializers in the graph, and then
* updated by the "initialization_binding" in every instance in
* ModelProto.training_info.
* The field "algorithm" defines a computation graph which represents a
* training algorithm's step. After the execution of a
* TrainingInfoProto.algorithm, the initializers specified by "update_binding"
* may be immediately updated. If the targeted training algorithm contains
* consecutive update steps (such as block coordinate descent methods),
* the user needs to create a TrainingInfoProto for each step.
*
*
* Protobuf type {@code onnx.TrainingInfoProto}
*/
public static final class TrainingInfoProto extends
org.nd4j.shade.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:onnx.TrainingInfoProto)
TrainingInfoProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use TrainingInfoProto.newBuilder() to construct.
private TrainingInfoProto(org.nd4j.shade.protobuf.GeneratedMessageV3.Builder builder) {
super(builder);
}
// No-arg constructor: starts both repeated fields as empty immutable lists so
// the default instance never exposes null.
private TrainingInfoProto() {
initializationBinding_ = java.util.Collections.emptyList();
updateBinding_ = java.util.Collections.emptyList();
}
// Invoked reflectively by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new TrainingInfoProto();
}
// Exposes fields that were present on the wire but unknown to this schema.
@java.lang.Override
public final org.nd4j.shade.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
/**
 * Wire-format parsing constructor: reads tag/value pairs from {@code input}
 * until end of message, dispatching on the field tag
 * (1 = initialization, 2 = algorithm, 3 = initialization_binding,
 * 4 = update_binding).  Unknown fields are preserved rather than dropped.
 *
 * @throws org.nd4j.shade.protobuf.InvalidProtocolBufferException on malformed
 *         input; the partially parsed message is attached via
 *         {@code setUnfinishedMessage}.
 */
private TrainingInfoProto(
    org.nd4j.shade.protobuf.CodedInputStream input,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  int mutable_bitField0_ = 0;
  org.nd4j.shade.protobuf.UnknownFieldSet.Builder unknownFields =
      org.nd4j.shade.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10: {
          // Field 1: merge into any previously seen "initialization" graph.
          onnx.OnnxMl.GraphProto.Builder subBuilder = null;
          if (initialization_ != null) {
            subBuilder = initialization_.toBuilder();
          }
          initialization_ = input.readMessage(onnx.OnnxMl.GraphProto.parser(), extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(initialization_);
            initialization_ = subBuilder.buildPartial();
          }
          break;
        }
        case 18: {
          // Field 2: merge into any previously seen "algorithm" graph.
          onnx.OnnxMl.GraphProto.Builder subBuilder = null;
          if (algorithm_ != null) {
            subBuilder = algorithm_.toBuilder();
          }
          algorithm_ = input.readMessage(onnx.OnnxMl.GraphProto.parser(), extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(algorithm_);
            algorithm_ = subBuilder.buildPartial();
          }
          break;
        }
        case 26: {
          // Field 3 (repeated): lazily allocate the mutable list on first entry.
          // Element type restored; the decayed source used a raw ArrayList.
          if (!((mutable_bitField0_ & 0x00000001) != 0)) {
            initializationBinding_ = new java.util.ArrayList<onnx.OnnxMl.StringStringEntryProto>();
            mutable_bitField0_ |= 0x00000001;
          }
          initializationBinding_.add(
              input.readMessage(onnx.OnnxMl.StringStringEntryProto.parser(), extensionRegistry));
          break;
        }
        case 34: {
          // Field 4 (repeated): same lazy-allocation pattern as field 3.
          if (!((mutable_bitField0_ & 0x00000002) != 0)) {
            updateBinding_ = new java.util.ArrayList<onnx.OnnxMl.StringStringEntryProto>();
            mutable_bitField0_ |= 0x00000002;
          }
          updateBinding_.add(
              input.readMessage(onnx.OnnxMl.StringStringEntryProto.parser(), extensionRegistry));
          break;
        }
        default: {
          if (!parseUnknownField(
              input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
      }
    }
  } catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (org.nd4j.shade.protobuf.UninitializedMessageException e) {
    throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new org.nd4j.shade.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    // Freeze the repeated fields and attach unknown fields even on failure paths.
    if (((mutable_bitField0_ & 0x00000001) != 0)) {
      initializationBinding_ = java.util.Collections.unmodifiableList(initializationBinding_);
    }
    if (((mutable_bitField0_ & 0x00000002) != 0)) {
      updateBinding_ = java.util.Collections.unmodifiableList(updateBinding_);
    }
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Descriptor for onnx.TrainingInfoProto, used for reflection-based access.
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return onnx.OnnxMl.internal_static_onnx_TrainingInfoProto_descriptor;
}
// Wires the generated field accessors to the descriptor for the runtime.
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return onnx.OnnxMl.internal_static_onnx_TrainingInfoProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
onnx.OnnxMl.TrainingInfoProto.class, onnx.OnnxMl.TrainingInfoProto.Builder.class);
}
public static final int INITIALIZATION_FIELD_NUMBER = 1;
private onnx.OnnxMl.GraphProto initialization_;
/**
 * <pre>
 * This field describes a graph to compute the initial tensors
 * upon starting the training process. Initialization graph has no input
 * and can have multiple outputs. Usually, trainable tensors in neural
 * networks are randomly initialized. To achieve that, for each tensor,
 * the user can put a random number operator such as RandomNormal or
 * RandomUniform in TrainingInfoProto.initialization.node and assign its
 * random output to the specific tensor using "initialization_binding".
 * This graph can also set the initializers in "algorithm" in the same
 * TrainingInfoProto; a use case is resetting the number of training
 * iterations to zero.
 * By default, this field is an empty graph and its evaluation does not
 * produce any output. Thus, no initializer would be changed by default.
 * </pre>
 *
 * <code>.onnx.GraphProto initialization = 1;</code>
 * @return Whether the initialization field is set.
 */
@java.lang.Override
public boolean hasInitialization() {
return initialization_ != null;
}
/**
 * See {@link #hasInitialization()} for the field semantics.
 *
 * <code>.onnx.GraphProto initialization = 1;</code>
 * @return The initialization graph, or the default (empty) GraphProto if unset.
 */
@java.lang.Override
public onnx.OnnxMl.GraphProto getInitialization() {
return initialization_ == null ? onnx.OnnxMl.GraphProto.getDefaultInstance() : initialization_;
}
/**
 * See {@link #hasInitialization()} for the field semantics.
 *
 * <code>.onnx.GraphProto initialization = 1;</code>
 */
@java.lang.Override
public onnx.OnnxMl.GraphProtoOrBuilder getInitializationOrBuilder() {
return getInitialization();
}
public static final int ALGORITHM_FIELD_NUMBER = 2;
private onnx.OnnxMl.GraphProto algorithm_;
/**
 * <pre>
 * This field represents a training algorithm step. Given required inputs,
 * it computes outputs to update initializers in its own or inference graph's
 * initializer lists. In general, this field contains loss node, gradient node,
 * optimizer node, increment of iteration count.
 * An execution of the training algorithm step is performed by executing the
 * graph obtained by combining the inference graph (namely "ModelProto.graph")
 * and the "algorithm" graph. That is, the actual
 * input/initializer/output/node/value_info/sparse_initializer list of
 * the training graph is the concatenation of
 * "ModelProto.graph.input/initializer/output/node/value_info/sparse_initializer"
 * and "algorithm.input/initializer/output/node/value_info/sparse_initializer"
 * in that order. This combined graph must satisfy the normal ONNX conditions.
 * Now, let's provide a visualization of graph combination for clarity.
 * Let the inference graph (i.e., "ModelProto.graph") be
 * tensor_a, tensor_b -> MatMul -> tensor_c -> Sigmoid -> tensor_d
 * and the "algorithm" graph be
 * tensor_d -> Add -> tensor_e
 * The combination process results in
 * tensor_a, tensor_b -> MatMul -> tensor_c -> Sigmoid -> tensor_d -> Add -> tensor_e
 * Notice that an input of a node in the "algorithm" graph may reference the
 * output of a node in the inference graph (but not the other way round). Also, inference
 * node cannot reference inputs of "algorithm". With these restrictions, inference graph
 * can always be run independently without training information.
 * By default, this field is an empty graph and its evaluation does not
 * produce any output. Evaluating the default training step never
 * updates any initializers.
 * </pre>
 *
 * <code>.onnx.GraphProto algorithm = 2;</code>
 * @return Whether the algorithm field is set.
 */
@java.lang.Override
public boolean hasAlgorithm() {
return algorithm_ != null;
}
/**
 * See {@link #hasAlgorithm()} for the field semantics.
 *
 * <code>.onnx.GraphProto algorithm = 2;</code>
 * @return The algorithm graph, or the default (empty) GraphProto if unset.
 */
@java.lang.Override
public onnx.OnnxMl.GraphProto getAlgorithm() {
return algorithm_ == null ? onnx.OnnxMl.GraphProto.getDefaultInstance() : algorithm_;
}
/**
 * See {@link #hasAlgorithm()} for the field semantics.
 *
 * <code>.onnx.GraphProto algorithm = 2;</code>
 */
@java.lang.Override
public onnx.OnnxMl.GraphProtoOrBuilder getAlgorithmOrBuilder() {
return getAlgorithm();
}
public static final int INITIALIZATION_BINDING_FIELD_NUMBER = 3;
// Generic type parameter restored; the decayed source had a raw List.
private java.util.List<onnx.OnnxMl.StringStringEntryProto> initializationBinding_;
/**
 * <pre>
 * This field specifies the bindings from the outputs of "initialization" to
 * some initializers in "ModelProto.graph.initializer" and
 * the "algorithm.initializer" in the same TrainingInfoProto.
 * See "update_binding" below for details.
 * By default, this field is empty and no initializer would be changed
 * by the execution of "initialization".
 * </pre>
 *
 * <code>repeated .onnx.StringStringEntryProto initialization_binding = 3;</code>
 */
@java.lang.Override
public java.util.List<onnx.OnnxMl.StringStringEntryProto> getInitializationBindingList() {
  return initializationBinding_;
}
/**
 * Returns the "initialization_binding" entries as read-only builder views.
 * See {@link #getInitializationBindingList()} for the field semantics.
 *
 * <code>repeated .onnx.StringStringEntryProto initialization_binding = 3;</code>
 */
@java.lang.Override
public java.util.List<? extends onnx.OnnxMl.StringStringEntryProtoOrBuilder>
    getInitializationBindingOrBuilderList() {
  return initializationBinding_;
}
/**
 * Number of "initialization_binding" entries.
 * See {@link #getInitializationBindingList()} for the field semantics.
 *
 * <code>repeated .onnx.StringStringEntryProto initialization_binding = 3;</code>
 */
@java.lang.Override
public int getInitializationBindingCount() {
  return initializationBinding_.size();
}
/**
 * Returns the "initialization_binding" entry at the given index.
 * See {@link #getInitializationBindingList()} for the field semantics.
 *
 * <code>repeated .onnx.StringStringEntryProto initialization_binding = 3;</code>
 */
@java.lang.Override
public onnx.OnnxMl.StringStringEntryProto getInitializationBinding(int index) {
  return initializationBinding_.get(index);
}
/**
 * Returns a read-only builder view of the "initialization_binding" entry at
 * the given index.  See {@link #getInitializationBindingList()} for the field
 * semantics.
 *
 * <code>repeated .onnx.StringStringEntryProto initialization_binding = 3;</code>
 */
@java.lang.Override
public onnx.OnnxMl.StringStringEntryProtoOrBuilder getInitializationBindingOrBuilder(
    int index) {
  return initializationBinding_.get(index);
}
public static final int UPDATE_BINDING_FIELD_NUMBER = 4;
// Generic type parameter restored; the decayed source had a raw List.
private java.util.List<onnx.OnnxMl.StringStringEntryProto> updateBinding_;
/**
 * <pre>
 * Gradient-based training is usually an iterative procedure. In one gradient
 * descent iteration, we apply
 * x = x - r * g
 * where "x" is the optimized tensor, "r" stands for learning rate, and "g" is
 * gradient of "x" with respect to a chosen loss. To avoid adding assignments
 * into the training graph, we split the update equation into
 * y = x - r * g
 * x = y
 * The user needs to save "y = x - r * g" into TrainingInfoProto.algorithm. To
 * tell that "y" should be assigned to "x", the field "update_binding" may
 * contain a key-value pair of strings, "x" (key of StringStringEntryProto)
 * and "y" (value of StringStringEntryProto).
 * For a neural network with multiple trainable (mutable) tensors, there can
 * be multiple key-value pairs in "update_binding".
 * The initializers appearing as keys in "update_binding" are considered
 * mutable variables. This implies some behaviors
 * as described below.
 * 1. We have only unique keys in all "update_binding"s so that two
 *    variables may not have the same name. This ensures that one
 *    variable is assigned up to once.
 * 2. The keys must appear in names of "ModelProto.graph.initializer" or
 *    "TrainingInfoProto.algorithm.initializer".
 * 3. The values must be output names of "algorithm" or "ModelProto.graph.output".
 * 4. Mutable variables are initialized to the value specified by the
 *    corresponding initializer, and then potentially updated by
 *    "initializer_binding"s and "update_binding"s in "TrainingInfoProto"s.
 * This field usually contains names of trainable tensors
 * (in ModelProto.graph), optimizer states such as momentums in advanced
 * stochastic gradient methods (in TrainingInfoProto.graph),
 * and number of training iterations (in TrainingInfoProto.graph).
 * By default, this field is empty and no initializer would be changed
 * by the execution of "algorithm".
 * </pre>
 *
 * <code>repeated .onnx.StringStringEntryProto update_binding = 4;</code>
 */
@java.lang.Override
public java.util.List<onnx.OnnxMl.StringStringEntryProto> getUpdateBindingList() {
  return updateBinding_;
}
/**
 * Returns the "update_binding" entries as read-only builder views.
 * See {@link #getUpdateBindingList()} for the field semantics.
 *
 * <code>repeated .onnx.StringStringEntryProto update_binding = 4;</code>
 */
@java.lang.Override
public java.util.List<? extends onnx.OnnxMl.StringStringEntryProtoOrBuilder>
    getUpdateBindingOrBuilderList() {
  return updateBinding_;
}
/**
 * Number of "update_binding" entries.
 * See {@link #getUpdateBindingList()} for the field semantics.
 *
 * <code>repeated .onnx.StringStringEntryProto update_binding = 4;</code>
 */
@java.lang.Override
public int getUpdateBindingCount() {
  return updateBinding_.size();
}
/**
 * Returns the "update_binding" entry at the given index.
 * See {@link #getUpdateBindingList()} for the field semantics.
 *
 * <code>repeated .onnx.StringStringEntryProto update_binding = 4;</code>
 */
@java.lang.Override
public onnx.OnnxMl.StringStringEntryProto getUpdateBinding(int index) {
  return updateBinding_.get(index);
}
/**
 * Returns a read-only builder view of the "update_binding" entry at the
 * given index.  See {@link #getUpdateBindingList()} for the field semantics.
 *
 * <code>repeated .onnx.StringStringEntryProto update_binding = 4;</code>
 */
@java.lang.Override
public onnx.OnnxMl.StringStringEntryProtoOrBuilder getUpdateBindingOrBuilder(
    int index) {
  return updateBinding_.get(index);
}
// Memoized initialization check: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
/** TrainingInfoProto declares no required fields, so every instance is initialized. */
@java.lang.Override
public final boolean isInitialized() {
  final byte cached = memoizedIsInitialized;
  if (cached == 1) {
    return true;
  }
  if (cached == 0) {
    return false;
  }
  memoizedIsInitialized = 1;
  return true;
}
// Serializes this message: fields are emitted in ascending field-number order
// (1..4), then any unknown fields captured at parse time are re-emitted.
@java.lang.Override
public void writeTo(org.nd4j.shade.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (initialization_ != null) {
output.writeMessage(1, getInitialization());
}
if (algorithm_ != null) {
output.writeMessage(2, getAlgorithm());
}
for (int i = 0; i < initializationBinding_.size(); i++) {
output.writeMessage(3, initializationBinding_.get(i));
}
for (int i = 0; i < updateBinding_.size(); i++) {
output.writeMessage(4, updateBinding_.get(i));
}
unknownFields.writeTo(output);
}
// Computes the serialized byte size; must mirror writeTo field-for-field.
@java.lang.Override
public int getSerializedSize() {
// Size is memoized after the first computation; -1 means "not computed yet".
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (initialization_ != null) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(1, getInitialization());
}
if (algorithm_ != null) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(2, getAlgorithm());
}
for (int i = 0; i < initializationBinding_.size(); i++) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(3, initializationBinding_.get(i));
}
for (int i = 0; i < updateBinding_.size(); i++) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(4, updateBinding_.get(i));
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
/**
 * Field-wise equality: presence flags and values of both singular graphs,
 * both repeated binding lists, and the unknown-field set must all match.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof onnx.OnnxMl.TrainingInfoProto)) {
    return super.equals(obj);
  }
  onnx.OnnxMl.TrainingInfoProto that = (onnx.OnnxMl.TrainingInfoProto) obj;
  if (hasInitialization() != that.hasInitialization()) return false;
  if (hasInitialization()
      && !getInitialization().equals(that.getInitialization())) return false;
  if (hasAlgorithm() != that.hasAlgorithm()) return false;
  if (hasAlgorithm()
      && !getAlgorithm().equals(that.getAlgorithm())) return false;
  return getInitializationBindingList().equals(that.getInitializationBindingList())
      && getUpdateBindingList().equals(that.getUpdateBindingList())
      && unknownFields.equals(that.unknownFields);
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
// Standard generated-message hashing: mix the descriptor hash, then each
// present field's number and value hash, in field-number order. The exact
// multiplier sequence must not change or hashes break compatibility.
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasInitialization()) {
hash = (37 * hash) + INITIALIZATION_FIELD_NUMBER;
hash = (53 * hash) + getInitialization().hashCode();
}
if (hasAlgorithm()) {
hash = (37 * hash) + ALGORITHM_FIELD_NUMBER;
hash = (53 * hash) + getAlgorithm().hashCode();
}
if (getInitializationBindingCount() > 0) {
hash = (37 * hash) + INITIALIZATION_BINDING_FIELD_NUMBER;
hash = (53 * hash) + getInitializationBindingList().hashCode();
}
if (getUpdateBindingCount() > 0) {
hash = (37 * hash) + UPDATE_BINDING_FIELD_NUMBER;
hash = (53 * hash) + getUpdateBindingList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// ---- Static parse helpers -------------------------------------------------
// In-memory sources (ByteBuffer / ByteString / byte[]) delegate to PARSER.
public static onnx.OnnxMl.TrainingInfoProto parseFrom(
java.nio.ByteBuffer data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.TrainingInfoProto parseFrom(
java.nio.ByteBuffer data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.TrainingInfoProto parseFrom(
org.nd4j.shade.protobuf.ByteString data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.TrainingInfoProto parseFrom(
org.nd4j.shade.protobuf.ByteString data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.TrainingInfoProto parseFrom(byte[] data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.TrainingInfoProto parseFrom(
byte[] data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
// Stream sources go through the GeneratedMessageV3 IO helpers, which wrap
// IOExceptions consistently.
public static onnx.OnnxMl.TrainingInfoProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static onnx.OnnxMl.TrainingInfoProto parseFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message body.
public static onnx.OnnxMl.TrainingInfoProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static onnx.OnnxMl.TrainingInfoProto parseDelimitedFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static onnx.OnnxMl.TrainingInfoProto parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static onnx.OnnxMl.TrainingInfoProto parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
// Fresh builder seeded from the shared default instance.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Fresh builder pre-populated with a copy of prototype's fields.
public static Builder newBuilder(onnx.OnnxMl.TrainingInfoProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// Skip the mergeFrom cost when this is the all-default singleton.
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
// Internal hook used by the runtime to create a builder wired to a parent,
// so nested-builder changes propagate dirtiness upward.
@java.lang.Override
protected Builder newBuilderForType(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
* Training information
* TrainingInfoProto stores information for training a model.
* In particular, this defines two functionalities: an initialization-step
* and a training-algorithm-step. Initialization resets the model
* back to its original state as if no training has been performed.
* Training algorithm improves the model based on input data.
* The semantics of the initialization-step is that the initializers
* in ModelProto.graph and in TrainingInfoProto.algorithm are first
* initialized as specified by the initializers in the graph, and then
* updated by the "initialization_binding" in every instance in
* ModelProto.training_info.
* The field "algorithm" defines a computation graph which represents a
* training algorithm's step. After the execution of a
* TrainingInfoProto.algorithm, the initializers specified by "update_binding"
* may be immediately updated. If the targeted training algorithm contains
* consecutive update steps (such as block coordinate descent methods),
* the user needs to create a TrainingInfoProto for each step.
*
*
* Protobuf type {@code onnx.TrainingInfoProto}
*/
public static final class Builder extends
org.nd4j.shade.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:onnx.TrainingInfoProto)
onnx.OnnxMl.TrainingInfoProtoOrBuilder {
// Descriptor for onnx.TrainingInfoProto, shared by message and builder.
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return onnx.OnnxMl.internal_static_onnx_TrainingInfoProto_descriptor;
}
// Maps descriptor fields to the generated accessors; initialized lazily on
// first reflective access.
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return onnx.OnnxMl.internal_static_onnx_TrainingInfoProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
onnx.OnnxMl.TrainingInfoProto.class, onnx.OnnxMl.TrainingInfoProto.Builder.class);
}
// Construct using onnx.OnnxMl.TrainingInfoProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
// Parented constructor: changes in this builder mark the parent dirty.
private Builder(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// When the runtime flag alwaysUseFieldBuilders is set, eagerly create the
// repeated-field builders instead of waiting for first use.
if (org.nd4j.shade.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getInitializationBindingFieldBuilder();
getUpdateBindingFieldBuilder();
}
}
// Resets every field to its unset state and returns this builder.
@java.lang.Override
public Builder clear() {
super.clear();
// Singular message fields: drop both the cached value and any nested builder.
if (initializationBuilder_ == null) {
initialization_ = null;
} else {
initialization_ = null;
initializationBuilder_ = null;
}
if (algorithmBuilder_ == null) {
algorithm_ = null;
} else {
algorithm_ = null;
algorithmBuilder_ = null;
}
// Repeated fields: reset to the shared empty list and clear the
// "backing list is mutable" bit, or delegate to the repeated-field builder.
if (initializationBindingBuilder_ == null) {
initializationBinding_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
initializationBindingBuilder_.clear();
}
if (updateBindingBuilder_ == null) {
updateBinding_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
} else {
updateBindingBuilder_.clear();
}
return this;
}
@java.lang.Override
public org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return onnx.OnnxMl.internal_static_onnx_TrainingInfoProto_descriptor;
}
// The immutable, all-defaults singleton for this message type.
@java.lang.Override
public onnx.OnnxMl.TrainingInfoProto getDefaultInstanceForType() {
return onnx.OnnxMl.TrainingInfoProto.getDefaultInstance();
}
// Builds and validates the message; proto3 has no required fields, so
// isInitialized() is always true and the exception path is effectively dead.
@java.lang.Override
public onnx.OnnxMl.TrainingInfoProto build() {
onnx.OnnxMl.TrainingInfoProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Assembles the message without required-field validation (proto3 has none).
// Fix: removed the dead local `from_bitField0_`, which was assigned from
// bitField0_ but never read anywhere in this method.
@java.lang.Override
public onnx.OnnxMl.TrainingInfoProto buildPartial() {
onnx.OnnxMl.TrainingInfoProto result = new onnx.OnnxMl.TrainingInfoProto(this);
// Singular message fields: take the plain value, or the nested builder's output.
if (initializationBuilder_ == null) {
result.initialization_ = initialization_;
} else {
result.initialization_ = initializationBuilder_.build();
}
if (algorithmBuilder_ == null) {
result.algorithm_ = algorithm_;
} else {
result.algorithm_ = algorithmBuilder_.build();
}
// Repeated fields: if no repeated-field builder exists and the backing list
// is still mutable (bit set), freeze it once so the built message can share
// it safely; afterwards the builder must copy before mutating again.
if (initializationBindingBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
initializationBinding_ = java.util.Collections.unmodifiableList(initializationBinding_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.initializationBinding_ = initializationBinding_;
} else {
result.initializationBinding_ = initializationBindingBuilder_.build();
}
if (updateBindingBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)) {
updateBinding_ = java.util.Collections.unmodifiableList(updateBinding_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.updateBinding_ = updateBinding_;
} else {
result.updateBinding_ = updateBindingBuilder_.build();
}
onBuilt();
return result;
}
// --- Generic, descriptor-driven mutators -----------------------------------
// These simply delegate to GeneratedMessageV3.Builder; they are overridden
// here so each returns this message's concrete Builder type.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.nd4j.shade.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatches to the type-specific merge when possible; otherwise falls back
// to the reflective, descriptor-driven merge in the superclass.
@java.lang.Override
public Builder mergeFrom(org.nd4j.shade.protobuf.Message other) {
if (other instanceof onnx.OnnxMl.TrainingInfoProto) {
return mergeFrom((onnx.OnnxMl.TrainingInfoProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge of another TrainingInfoProto into this builder.
// Singular messages are merged recursively; repeated fields are appended.
public Builder mergeFrom(onnx.OnnxMl.TrainingInfoProto other) {
// Merging the default instance is a no-op.
if (other == onnx.OnnxMl.TrainingInfoProto.getDefaultInstance()) return this;
if (other.hasInitialization()) {
mergeInitialization(other.getInitialization());
}
if (other.hasAlgorithm()) {
mergeAlgorithm(other.getAlgorithm());
}
// Repeated "initialization_binding": list mode (no builder created yet).
if (initializationBindingBuilder_ == null) {
if (!other.initializationBinding_.isEmpty()) {
if (initializationBinding_.isEmpty()) {
// We hold nothing yet: alias other's (immutable) list and mark ours
// as not privately owned so it gets copied before any mutation.
initializationBinding_ = other.initializationBinding_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureInitializationBindingIsMutable();
initializationBinding_.addAll(other.initializationBinding_);
}
onChanged();
}
} else {
// Builder mode: if our builder is empty it is cheaper to discard it and
// alias other's list (recreating the builder only under the test flag);
// otherwise append other's messages to the existing builder.
if (!other.initializationBinding_.isEmpty()) {
if (initializationBindingBuilder_.isEmpty()) {
initializationBindingBuilder_.dispose();
initializationBindingBuilder_ = null;
initializationBinding_ = other.initializationBinding_;
bitField0_ = (bitField0_ & ~0x00000001);
initializationBindingBuilder_ =
org.nd4j.shade.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getInitializationBindingFieldBuilder() : null;
} else {
initializationBindingBuilder_.addAllMessages(other.initializationBinding_);
}
}
}
// Repeated "update_binding": identical strategy, bit 0x00000002.
if (updateBindingBuilder_ == null) {
if (!other.updateBinding_.isEmpty()) {
if (updateBinding_.isEmpty()) {
updateBinding_ = other.updateBinding_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureUpdateBindingIsMutable();
updateBinding_.addAll(other.updateBinding_);
}
onChanged();
}
} else {
if (!other.updateBinding_.isEmpty()) {
if (updateBindingBuilder_.isEmpty()) {
updateBindingBuilder_.dispose();
updateBindingBuilder_ = null;
updateBinding_ = other.updateBinding_;
bitField0_ = (bitField0_ & ~0x00000002);
updateBindingBuilder_ =
org.nd4j.shade.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getUpdateBindingFieldBuilder() : null;
} else {
updateBindingBuilder_.addAllMessages(other.updateBinding_);
}
}
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
// Proto3 message with no required fields: always initialized.
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses from a wire-format stream and merges the result into this builder.
// On parse failure, whatever was successfully read (the "unfinished" message)
// is still merged via the finally block before the error is rethrown as an
// IOException.
@java.lang.Override
public Builder mergeFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
onnx.OnnxMl.TrainingInfoProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (onnx.OnnxMl.TrainingInfoProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Tracks which repeated-field backing lists are privately owned and mutable
// (0x1 = initialization_binding, 0x2 = update_binding).
private int bitField0_;
// Field 1, ".onnx.GraphProto initialization": either the plain message or a
// lazily-created nested builder is populated — never both at once.
private onnx.OnnxMl.GraphProto initialization_;
private org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.GraphProto, onnx.OnnxMl.GraphProto.Builder, onnx.OnnxMl.GraphProtoOrBuilder> initializationBuilder_;
/**
*
* This field describes a graph to compute the initial tensors
* upon starting the training process. Initialization graph has no input
* and can have multiple outputs. Usually, trainable tensors in neural
* networks are randomly initialized. To achieve that, for each tensor,
* the user can put a random number operator such as RandomNormal or
* RandomUniform in TrainingInfoProto.initialization.node and assign its
* random output to the specific tensor using "initialization_binding".
* This graph can also set the initializers in "algorithm" in the same
* TrainingInfoProto; a use case is resetting the number of training
* iteration to zero.
* By default, this field is an empty graph and its evaluation does not
* produce any output. Thus, no initializer would be changed by default.
*
*
* .onnx.GraphProto initialization = 1;
* @return Whether the initialization field is set.
*/
public boolean hasInitialization() {
// Present if a value was set directly or a nested builder has been created.
return initializationBuilder_ != null || initialization_ != null;
}
/**
*
* This field describes a graph to compute the initial tensors
* upon starting the training process. Initialization graph has no input
* and can have multiple outputs. Usually, trainable tensors in neural
* networks are randomly initialized. To achieve that, for each tensor,
* the user can put a random number operator such as RandomNormal or
* RandomUniform in TrainingInfoProto.initialization.node and assign its
* random output to the specific tensor using "initialization_binding".
* This graph can also set the initializers in "algorithm" in the same
* TrainingInfoProto; a use case is resetting the number of training
* iteration to zero.
* By default, this field is an empty graph and its evaluation does not
* produce any output. Thus, no initializer would be changed by default.
*
*
* .onnx.GraphProto initialization = 1;
* @return The initialization.
*/
public onnx.OnnxMl.GraphProto getInitialization() {
if (initializationBuilder_ == null) {
// Unset fields read as the default (empty) GraphProto, never null.
return initialization_ == null ? onnx.OnnxMl.GraphProto.getDefaultInstance() : initialization_;
} else {
return initializationBuilder_.getMessage();
}
}
/**
*
* This field describes a graph to compute the initial tensors
* upon starting the training process. Initialization graph has no input
* and can have multiple outputs. Usually, trainable tensors in neural
* networks are randomly initialized. To achieve that, for each tensor,
* the user can put a random number operator such as RandomNormal or
* RandomUniform in TrainingInfoProto.initialization.node and assign its
* random output to the specific tensor using "initialization_binding".
* This graph can also set the initializers in "algorithm" in the same
* TrainingInfoProto; a use case is resetting the number of training
* iteration to zero.
* By default, this field is an empty graph and its evaluation does not
* produce any output. Thus, no initializer would be changed by default.
*
*
* .onnx.GraphProto initialization = 1;
*/
// Replaces the initialization graph. Rejects null in the plain-value path;
// NOTE(review): the builder path relies on SingleFieldBuilderV3.setMessage
// for its null handling — confirm against the shaded runtime.
public Builder setInitialization(onnx.OnnxMl.GraphProto value) {
if (initializationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
initialization_ = value;
onChanged();
} else {
initializationBuilder_.setMessage(value);
}
return this;
}
/**
*
* This field describes a graph to compute the initial tensors
* upon starting the training process. Initialization graph has no input
* and can have multiple outputs. Usually, trainable tensors in neural
* networks are randomly initialized. To achieve that, for each tensor,
* the user can put a random number operator such as RandomNormal or
* RandomUniform in TrainingInfoProto.initialization.node and assign its
* random output to the specific tensor using "initialization_binding".
* This graph can also set the initializers in "algorithm" in the same
* TrainingInfoProto; a use case is resetting the number of training
* iteration to zero.
* By default, this field is an empty graph and its evaluation does not
* produce any output. Thus, no initializer would be changed by default.
*
*
* .onnx.GraphProto initialization = 1;
*/
// Replaces the initialization graph with the builder's built snapshot;
// later edits to builderForValue do not affect this message.
public Builder setInitialization(
onnx.OnnxMl.GraphProto.Builder builderForValue) {
if (initializationBuilder_ == null) {
initialization_ = builderForValue.build();
onChanged();
} else {
initializationBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
* This field describes a graph to compute the initial tensors
* upon starting the training process. Initialization graph has no input
* and can have multiple outputs. Usually, trainable tensors in neural
* networks are randomly initialized. To achieve that, for each tensor,
* the user can put a random number operator such as RandomNormal or
* RandomUniform in TrainingInfoProto.initialization.node and assign its
* random output to the specific tensor using "initialization_binding".
* This graph can also set the initializers in "algorithm" in the same
* TrainingInfoProto; a use case is resetting the number of training
* iteration to zero.
* By default, this field is an empty graph and its evaluation does not
* produce any output. Thus, no initializer would be changed by default.
*
*
* .onnx.GraphProto initialization = 1;
*/
// Merges value into the current initialization graph field-by-field, or
// adopts it outright when the field is still unset.
public Builder mergeInitialization(onnx.OnnxMl.GraphProto value) {
if (initializationBuilder_ == null) {
if (initialization_ != null) {
initialization_ =
onnx.OnnxMl.GraphProto.newBuilder(initialization_).mergeFrom(value).buildPartial();
} else {
initialization_ = value;
}
onChanged();
} else {
initializationBuilder_.mergeFrom(value);
}
return this;
}
/**
*
* This field describes a graph to compute the initial tensors
* upon starting the training process. Initialization graph has no input
* and can have multiple outputs. Usually, trainable tensors in neural
* networks are randomly initialized. To achieve that, for each tensor,
* the user can put a random number operator such as RandomNormal or
* RandomUniform in TrainingInfoProto.initialization.node and assign its
* random output to the specific tensor using "initialization_binding".
* This graph can also set the initializers in "algorithm" in the same
* TrainingInfoProto; a use case is resetting the number of training
* iteration to zero.
* By default, this field is an empty graph and its evaluation does not
* produce any output. Thus, no initializer would be changed by default.
*
*
* .onnx.GraphProto initialization = 1;
*/
// Returns the field to its unset state; in builder mode both the cached
// value and the nested builder are discarded.
public Builder clearInitialization() {
if (initializationBuilder_ == null) {
initialization_ = null;
onChanged();
} else {
initialization_ = null;
initializationBuilder_ = null;
}
return this;
}
/**
*
* This field describes a graph to compute the initial tensors
* upon starting the training process. Initialization graph has no input
* and can have multiple outputs. Usually, trainable tensors in neural
* networks are randomly initialized. To achieve that, for each tensor,
* the user can put a random number operator such as RandomNormal or
* RandomUniform in TrainingInfoProto.initialization.node and assign its
* random output to the specific tensor using "initialization_binding".
* This graph can also set the initializers in "algorithm" in the same
* TrainingInfoProto; a use case is resetting the number of training
* iteration to zero.
* By default, this field is an empty graph and its evaluation does not
* produce any output. Thus, no initializer would be changed by default.
*
*
* .onnx.GraphProto initialization = 1;
*/
// Exposes a mutable nested builder for in-place edits; marks this builder
// dirty up front since the caller is expected to mutate the result.
public onnx.OnnxMl.GraphProto.Builder getInitializationBuilder() {
onChanged();
return getInitializationFieldBuilder().getBuilder();
}
/**
*
* This field describes a graph to compute the initial tensors
* upon starting the training process. Initialization graph has no input
* and can have multiple outputs. Usually, trainable tensors in neural
* networks are randomly initialized. To achieve that, for each tensor,
* the user can put a random number operator such as RandomNormal or
* RandomUniform in TrainingInfoProto.initialization.node and assign its
* random output to the specific tensor using "initialization_binding".
* This graph can also set the initializers in "algorithm" in the same
* TrainingInfoProto; a use case is resetting the number of training
* iteration to zero.
* By default, this field is an empty graph and its evaluation does not
* produce any output. Thus, no initializer would be changed by default.
*
*
* .onnx.GraphProto initialization = 1;
*/
// Read-only view that avoids forcing nested-builder creation.
public onnx.OnnxMl.GraphProtoOrBuilder getInitializationOrBuilder() {
if (initializationBuilder_ != null) {
return initializationBuilder_.getMessageOrBuilder();
} else {
return initialization_ == null ?
onnx.OnnxMl.GraphProto.getDefaultInstance() : initialization_;
}
}
/**
*
* This field describes a graph to compute the initial tensors
* upon starting the training process. Initialization graph has no input
* and can have multiple outputs. Usually, trainable tensors in neural
* networks are randomly initialized. To achieve that, for each tensor,
* the user can put a random number operator such as RandomNormal or
* RandomUniform in TrainingInfoProto.initialization.node and assign its
* random output to the specific tensor using "initialization_binding".
* This graph can also set the initializers in "algorithm" in the same
* TrainingInfoProto; a use case is resetting the number of training
* iteration to zero.
* By default, this field is an empty graph and its evaluation does not
* produce any output. Thus, no initializer would be changed by default.
*
*
* .onnx.GraphProto initialization = 1;
*/
// Lazily creates the nested builder, seeded with the current value. Once the
// builder exists it owns the field, so the plain reference is cleared.
private org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.GraphProto, onnx.OnnxMl.GraphProto.Builder, onnx.OnnxMl.GraphProtoOrBuilder>
getInitializationFieldBuilder() {
if (initializationBuilder_ == null) {
initializationBuilder_ = new org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.GraphProto, onnx.OnnxMl.GraphProto.Builder, onnx.OnnxMl.GraphProtoOrBuilder>(
getInitialization(),
getParentForChildren(),
isClean());
initialization_ = null;
}
return initializationBuilder_;
}
// Field 2, ".onnx.GraphProto algorithm": mirrors the initialization field —
// either the plain message or a lazily-created nested builder, never both.
private onnx.OnnxMl.GraphProto algorithm_;
private org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.GraphProto, onnx.OnnxMl.GraphProto.Builder, onnx.OnnxMl.GraphProtoOrBuilder> algorithmBuilder_;
/**
*
* This field represents a training algorithm step. Given required inputs,
* it computes outputs to update initializers in its own or inference graph's
* initializer lists. In general, this field contains loss node, gradient node,
* optimizer node, increment of iteration count.
* An execution of the training algorithm step is performed by executing the
* graph obtained by combining the inference graph (namely "ModelProto.graph")
* and the "algorithm" graph. That is, the actual
* input/initializer/output/node/value_info/sparse_initializer list of
* the training graph is the concatenation of
* "ModelProto.graph.input/initializer/output/node/value_info/sparse_initializer"
* and "algorithm.input/initializer/output/node/value_info/sparse_initializer"
* in that order. This combined graph must satisfy the normal ONNX conditions.
* Now, let's provide a visualization of graph combination for clarity.
* Let the inference graph (i.e., "ModelProto.graph") be
* tensor_a, tensor_b -> MatMul -> tensor_c -> Sigmoid -> tensor_d
* and the "algorithm" graph be
* tensor_d -> Add -> tensor_e
* The combination process results
* tensor_a, tensor_b -> MatMul -> tensor_c -> Sigmoid -> tensor_d -> Add -> tensor_e
* Notice that an input of a node in the "algorithm" graph may reference the
* output of a node in the inference graph (but not the other way round). Also, inference
* node cannot reference inputs of "algorithm". With these restrictions, inference graph
* can always be run independently without training information.
* By default, this field is an empty graph and its evaluation does not
* produce any output. Evaluating the default training step never
* update any initializers.
*
*
* .onnx.GraphProto algorithm = 2;
* @return Whether the algorithm field is set.
*/
public boolean hasAlgorithm() {
// Present if a value was set directly or a nested builder has been created.
return algorithmBuilder_ != null || algorithm_ != null;
}
/**
*
* This field represents a training algorithm step. Given required inputs,
* it computes outputs to update initializers in its own or inference graph's
* initializer lists. In general, this field contains loss node, gradient node,
* optimizer node, increment of iteration count.
* An execution of the training algorithm step is performed by executing the
* graph obtained by combining the inference graph (namely "ModelProto.graph")
* and the "algorithm" graph. That is, the actual
* input/initializer/output/node/value_info/sparse_initializer list of
* the training graph is the concatenation of
* "ModelProto.graph.input/initializer/output/node/value_info/sparse_initializer"
* and "algorithm.input/initializer/output/node/value_info/sparse_initializer"
* in that order. This combined graph must satisfy the normal ONNX conditions.
* Now, let's provide a visualization of graph combination for clarity.
* Let the inference graph (i.e., "ModelProto.graph") be
* tensor_a, tensor_b -> MatMul -> tensor_c -> Sigmoid -> tensor_d
* and the "algorithm" graph be
* tensor_d -> Add -> tensor_e
* The combination process results
* tensor_a, tensor_b -> MatMul -> tensor_c -> Sigmoid -> tensor_d -> Add -> tensor_e
* Notice that an input of a node in the "algorithm" graph may reference the
* output of a node in the inference graph (but not the other way round). Also, inference
* node cannot reference inputs of "algorithm". With these restrictions, inference graph
* can always be run independently without training information.
* By default, this field is an empty graph and its evaluation does not
* produce any output. Evaluating the default training step never
* update any initializers.
*
*
* .onnx.GraphProto algorithm = 2;
* @return The algorithm.
*/
public onnx.OnnxMl.GraphProto getAlgorithm() {
if (algorithmBuilder_ == null) {
// Unset fields read as the default (empty) GraphProto, never null.
return algorithm_ == null ? onnx.OnnxMl.GraphProto.getDefaultInstance() : algorithm_;
} else {
return algorithmBuilder_.getMessage();
}
}
/**
*
* This field represents a training algorithm step. Given required inputs,
* it computes outputs to update initializers in its own or inference graph's
* initializer lists. In general, this field contains loss node, gradient node,
* optimizer node, increment of iteration count.
* An execution of the training algorithm step is performed by executing the
* graph obtained by combining the inference graph (namely "ModelProto.graph")
* and the "algorithm" graph. That is, the actual
* input/initializer/output/node/value_info/sparse_initializer list of
* the training graph is the concatenation of
* "ModelProto.graph.input/initializer/output/node/value_info/sparse_initializer"
* and "algorithm.input/initializer/output/node/value_info/sparse_initializer"
* in that order. This combined graph must satisfy the normal ONNX conditions.
* Now, let's provide a visualization of graph combination for clarity.
* Let the inference graph (i.e., "ModelProto.graph") be
* tensor_a, tensor_b -> MatMul -> tensor_c -> Sigmoid -> tensor_d
* and the "algorithm" graph be
* tensor_d -> Add -> tensor_e
* The combination process results
* tensor_a, tensor_b -> MatMul -> tensor_c -> Sigmoid -> tensor_d -> Add -> tensor_e
* Notice that an input of a node in the "algorithm" graph may reference the
* output of a node in the inference graph (but not the other way round). Also, inference
* node cannot reference inputs of "algorithm". With these restrictions, inference graph
* can always be run independently without training information.
* By default, this field is an empty graph and its evaluation does not
* produce any output. Evaluating the default training step never
* update any initializers.
*
*
* .onnx.GraphProto algorithm = 2;
*/
// Replaces the algorithm graph. Rejects null in the plain-value path;
// NOTE(review): the builder path relies on SingleFieldBuilderV3.setMessage
// for its null handling — confirm against the shaded runtime.
public Builder setAlgorithm(onnx.OnnxMl.GraphProto value) {
if (algorithmBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
algorithm_ = value;
onChanged();
} else {
algorithmBuilder_.setMessage(value);
}
return this;
}
/**
*
* This field represents a training algorithm step. Given required inputs,
* it computes outputs to update initializers in its own or inference graph's
* initializer lists. In general, this field contains loss node, gradient node,
* optimizer node, increment of iteration count.
* An execution of the training algorithm step is performed by executing the
* graph obtained by combining the inference graph (namely "ModelProto.graph")
* and the "algorithm" graph. That is, the actual
* input/initializer/output/node/value_info/sparse_initializer list of
* the training graph is the concatenation of
* "ModelProto.graph.input/initializer/output/node/value_info/sparse_initializer"
* and "algorithm.input/initializer/output/node/value_info/sparse_initializer"
* in that order. This combined graph must satisfy the normal ONNX conditions.
* Now, let's provide a visualization of graph combination for clarity.
* Let the inference graph (i.e., "ModelProto.graph") be
* tensor_a, tensor_b -> MatMul -> tensor_c -> Sigmoid -> tensor_d
* and the "algorithm" graph be
* tensor_d -> Add -> tensor_e
* The combination process results
* tensor_a, tensor_b -> MatMul -> tensor_c -> Sigmoid -> tensor_d -> Add -> tensor_e
* Notice that an input of a node in the "algorithm" graph may reference the
* output of a node in the inference graph (but not the other way round). Also, inference
* node cannot reference inputs of "algorithm". With these restrictions, inference graph
* can always be run independently without training information.
* By default, this field is an empty graph and its evaluation does not
* produce any output. Evaluating the default training step never
* update any initializers.
*
*
* .onnx.GraphProto algorithm = 2;
*/
// Replaces the algorithm graph with the builder's built snapshot; later
// edits to builderForValue do not affect this message.
public Builder setAlgorithm(
onnx.OnnxMl.GraphProto.Builder builderForValue) {
if (algorithmBuilder_ == null) {
algorithm_ = builderForValue.build();
onChanged();
} else {
algorithmBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
* This field represents a training algorithm step. Given required inputs,
* it computes outputs to update initializers in its own or inference graph's
* initializer lists. In general, this field contains loss node, gradient node,
* optimizer node, increment of iteration count.
* An execution of the training algorithm step is performed by executing the
* graph obtained by combining the inference graph (namely "ModelProto.graph")
* and the "algorithm" graph. That is, the actual
* input/initializer/output/node/value_info/sparse_initializer list of
* the training graph is the concatenation of
* "ModelProto.graph.input/initializer/output/node/value_info/sparse_initializer"
* and "algorithm.input/initializer/output/node/value_info/sparse_initializer"
* in that order. This combined graph must satisfy the normal ONNX conditions.
* Now, let's provide a visualization of graph combination for clarity.
* Let the inference graph (i.e., "ModelProto.graph") be
* tensor_a, tensor_b -> MatMul -> tensor_c -> Sigmoid -> tensor_d
* and the "algorithm" graph be
* tensor_d -> Add -> tensor_e
* The combination process results
* tensor_a, tensor_b -> MatMul -> tensor_c -> Sigmoid -> tensor_d -> Add -> tensor_e
* Notice that an input of a node in the "algorithm" graph may reference the
* output of a node in the inference graph (but not the other way round). Also, inference
* node cannot reference inputs of "algorithm". With these restrictions, inference graph
* can always be run independently without training information.
* By default, this field is an empty graph and its evaluation does not
* produce any output. Evaluating the default training step never
* update any initializers.
*
*
* .onnx.GraphProto algorithm = 2;
*/
// Merges value into the current algorithm graph field-by-field, or adopts it
// outright when the field is still unset.
public Builder mergeAlgorithm(onnx.OnnxMl.GraphProto value) {
if (algorithmBuilder_ == null) {
if (algorithm_ != null) {
algorithm_ =
onnx.OnnxMl.GraphProto.newBuilder(algorithm_).mergeFrom(value).buildPartial();
} else {
algorithm_ = value;
}
onChanged();
} else {
algorithmBuilder_.mergeFrom(value);
}
return this;
}
/**
*
* This field represents a training algorithm step. Given required inputs,
* it computes outputs to update initializers in its own or inference graph's
* initializer lists. In general, this field contains loss node, gradient node,
* optimizer node, increment of iteration count.
* An execution of the training algorithm step is performed by executing the
* graph obtained by combining the inference graph (namely "ModelProto.graph")
* and the "algorithm" graph. That is, the actual
* input/initializer/output/node/value_info/sparse_initializer list of
* the training graph is the concatenation of
* "ModelProto.graph.input/initializer/output/node/value_info/sparse_initializer"
* and "algorithm.input/initializer/output/node/value_info/sparse_initializer"
* in that order. This combined graph must satisfy the normal ONNX conditions.
* Now, let's provide a visualization of graph combination for clarity.
* Let the inference graph (i.e., "ModelProto.graph") be
* tensor_a, tensor_b -> MatMul -> tensor_c -> Sigmoid -> tensor_d
* and the "algorithm" graph be
* tensor_d -> Add -> tensor_e
* The combination process results
* tensor_a, tensor_b -> MatMul -> tensor_c -> Sigmoid -> tensor_d -> Add -> tensor_e
* Notice that an input of a node in the "algorithm" graph may reference the
* output of a node in the inference graph (but not the other way round). Also, inference
* node cannot reference inputs of "algorithm". With these restrictions, inference graph
* can always be run independently without training information.
* By default, this field is an empty graph and its evaluation does not
* produce any output. Evaluating the default training step never
* update any initializers.
*
*
* .onnx.GraphProto algorithm = 2;
*/
// Resets the singular "algorithm" field (field 2) to its default (unset) state.
public Builder clearAlgorithm() {
if (algorithmBuilder_ == null) {
// No nested builder yet: drop the message reference and notify the parent.
algorithm_ = null;
onChanged();
} else {
// A nested builder exists: discard both it and the cached message.
algorithm_ = null;
algorithmBuilder_ = null;
}
return this;
}
/**
*
* This field represents a training algorithm step. Given required inputs,
* it computes outputs to update initializers in its own or inference graph's
* initializer lists. In general, this field contains loss node, gradient node,
* optimizer node, increment of iteration count.
* An execution of the training algorithm step is performed by executing the
* graph obtained by combining the inference graph (namely "ModelProto.graph")
* and the "algorithm" graph. That is, the actual
* input/initializer/output/node/value_info/sparse_initializer list of
* the training graph is the concatenation of
* "ModelProto.graph.input/initializer/output/node/value_info/sparse_initializer"
* and "algorithm.input/initializer/output/node/value_info/sparse_initializer"
* in that order. This combined graph must satisfy the normal ONNX conditions.
* Now, let's provide a visualization of graph combination for clarity.
* Let the inference graph (i.e., "ModelProto.graph") be
* tensor_a, tensor_b -> MatMul -> tensor_c -> Sigmoid -> tensor_d
* and the "algorithm" graph be
* tensor_d -> Add -> tensor_e
* The combination process results
* tensor_a, tensor_b -> MatMul -> tensor_c -> Sigmoid -> tensor_d -> Add -> tensor_e
* Notice that an input of a node in the "algorithm" graph may reference the
* output of a node in the inference graph (but not the other way round). Also, inference
* node cannot reference inputs of "algorithm". With these restrictions, inference graph
* can always be run independently without training information.
* By default, this field is an empty graph and its evaluation does not
* produce any output. Evaluating the default training step never
* updates any initializers.
*
*
* .onnx.GraphProto algorithm = 2;
*/
// Returns a mutable builder for the "algorithm" field, lazily creating the
// nested-builder machinery; marks this builder dirty since the caller may mutate it.
public onnx.OnnxMl.GraphProto.Builder getAlgorithmBuilder() {
onChanged();
return getAlgorithmFieldBuilder().getBuilder();
}
/**
*
* This field represents a training algorithm step. Given required inputs,
* it computes outputs to update initializers in its own or inference graph's
* initializer lists. In general, this field contains loss node, gradient node,
* optimizer node, increment of iteration count.
* An execution of the training algorithm step is performed by executing the
* graph obtained by combining the inference graph (namely "ModelProto.graph")
* and the "algorithm" graph. That is, the actual
* input/initializer/output/node/value_info/sparse_initializer list of
* the training graph is the concatenation of
* "ModelProto.graph.input/initializer/output/node/value_info/sparse_initializer"
* and "algorithm.input/initializer/output/node/value_info/sparse_initializer"
* in that order. This combined graph must satisfy the normal ONNX conditions.
* Now, let's provide a visualization of graph combination for clarity.
* Let the inference graph (i.e., "ModelProto.graph") be
* tensor_a, tensor_b -> MatMul -> tensor_c -> Sigmoid -> tensor_d
* and the "algorithm" graph be
* tensor_d -> Add -> tensor_e
* The combination process results
* tensor_a, tensor_b -> MatMul -> tensor_c -> Sigmoid -> tensor_d -> Add -> tensor_e
* Notice that an input of a node in the "algorithm" graph may reference the
* output of a node in the inference graph (but not the other way round). Also, inference
* node cannot reference inputs of "algorithm". With these restrictions, inference graph
* can always be run independently without training information.
* By default, this field is an empty graph and its evaluation does not
* produce any output. Evaluating the default training step never
* updates any initializers.
*
*
* .onnx.GraphProto algorithm = 2;
*/
// Read-only view of the "algorithm" field without forcing builder creation;
// falls back to the type's default instance when the field is unset.
public onnx.OnnxMl.GraphProtoOrBuilder getAlgorithmOrBuilder() {
if (algorithmBuilder_ != null) {
return algorithmBuilder_.getMessageOrBuilder();
} else {
return algorithm_ == null ?
onnx.OnnxMl.GraphProto.getDefaultInstance() : algorithm_;
}
}
/**
*
* This field represents a training algorithm step. Given required inputs,
* it computes outputs to update initializers in its own or inference graph's
* initializer lists. In general, this field contains loss node, gradient node,
* optimizer node, increment of iteration count.
* An execution of the training algorithm step is performed by executing the
* graph obtained by combining the inference graph (namely "ModelProto.graph")
* and the "algorithm" graph. That is, the actual
* input/initializer/output/node/value_info/sparse_initializer list of
* the training graph is the concatenation of
* "ModelProto.graph.input/initializer/output/node/value_info/sparse_initializer"
* and "algorithm.input/initializer/output/node/value_info/sparse_initializer"
* in that order. This combined graph must satisfy the normal ONNX conditions.
* Now, let's provide a visualization of graph combination for clarity.
* Let the inference graph (i.e., "ModelProto.graph") be
* tensor_a, tensor_b -> MatMul -> tensor_c -> Sigmoid -> tensor_d
* and the "algorithm" graph be
* tensor_d -> Add -> tensor_e
* The combination process results
* tensor_a, tensor_b -> MatMul -> tensor_c -> Sigmoid -> tensor_d -> Add -> tensor_e
* Notice that an input of a node in the "algorithm" graph may reference the
* output of a node in the inference graph (but not the other way round). Also, inference
* node cannot reference inputs of "algorithm". With these restrictions, inference graph
* can always be run independently without training information.
* By default, this field is an empty graph and its evaluation does not
* produce any output. Evaluating the default training step never
* updates any initializers.
*
*
* .onnx.GraphProto algorithm = 2;
*/
// Lazily constructs the SingleFieldBuilderV3 for "algorithm"; once created,
// algorithm_ is nulled out because the builder becomes the single source of truth.
private org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.GraphProto, onnx.OnnxMl.GraphProto.Builder, onnx.OnnxMl.GraphProtoOrBuilder>
getAlgorithmFieldBuilder() {
if (algorithmBuilder_ == null) {
algorithmBuilder_ = new org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.GraphProto, onnx.OnnxMl.GraphProto.Builder, onnx.OnnxMl.GraphProtoOrBuilder>(
getAlgorithm(),
getParentForChildren(),
isClean());
algorithm_ = null;
}
return algorithmBuilder_;
}
// Backing storage for the repeated "initialization_binding" field (field 3).
// Generic type parameters restored — the extracted text had raw List/ArrayList,
// which protoc never emits and which produces unchecked-warning raw types.
private java.util.List<onnx.OnnxMl.StringStringEntryProto> initializationBinding_ =
java.util.Collections.emptyList();
// Copy-on-write guard: bit 0x00000001 of bitField0_ records whether the list is
// already a private mutable copy owned by this builder.
private void ensureInitializationBindingIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
initializationBinding_ = new java.util.ArrayList<onnx.OnnxMl.StringStringEntryProto>(initializationBinding_);
bitField0_ |= 0x00000001;
}
}
// Lazily-created repeated-field builder; null until first accessor that needs it.
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
onnx.OnnxMl.StringStringEntryProto, onnx.OnnxMl.StringStringEntryProto.Builder, onnx.OnnxMl.StringStringEntryProtoOrBuilder> initializationBindingBuilder_;
/**
*
* This field specifies the bindings from the outputs of "initialization" to
* some initializers in "ModelProto.graph.initializer" and
* the "algorithm.initializer" in the same TrainingInfoProto.
* See "update_binding" below for details.
* By default, this field is empty and no initializer would be changed
* by the execution of "initialization".
*
*
* repeated .onnx.StringStringEntryProto initialization_binding = 3;
*/
// Returns the "initialization_binding" entries; generic return type restored
// (raw java.util.List in the extracted text). The list is an unmodifiable view
// of the builder's backing list, or the builder-machinery's message list.
public java.util.List<onnx.OnnxMl.StringStringEntryProto> getInitializationBindingList() {
if (initializationBindingBuilder_ == null) {
return java.util.Collections.unmodifiableList(initializationBinding_);
} else {
return initializationBindingBuilder_.getMessageList();
}
}
/**
*
* This field specifies the bindings from the outputs of "initialization" to
* some initializers in "ModelProto.graph.initializer" and
* the "algorithm.initializer" in the same TrainingInfoProto.
* See "update_binding" below for details.
* By default, this field is empty and no initializer would be changed
* by the execution of "initialization".
*
*
* repeated .onnx.StringStringEntryProto initialization_binding = 3;
*/
// Number of "initialization_binding" entries, from whichever store is active.
public int getInitializationBindingCount() {
if (initializationBindingBuilder_ == null) {
return initializationBinding_.size();
} else {
return initializationBindingBuilder_.getCount();
}
}
/**
*
* This field specifies the bindings from the outputs of "initialization" to
* some initializers in "ModelProto.graph.initializer" and
* the "algorithm.initializer" in the same TrainingInfoProto.
* See "update_binding" below for details.
* By default, this field is empty and no initializer would be changed
* by the execution of "initialization".
*
*
* repeated .onnx.StringStringEntryProto initialization_binding = 3;
*/
// Returns the entry at the given index (throws IndexOutOfBoundsException for
// an invalid index, from the underlying list/builder).
public onnx.OnnxMl.StringStringEntryProto getInitializationBinding(int index) {
if (initializationBindingBuilder_ == null) {
return initializationBinding_.get(index);
} else {
return initializationBindingBuilder_.getMessage(index);
}
}
/**
*
* This field specifies the bindings from the outputs of "initialization" to
* some initializers in "ModelProto.graph.initializer" and
* the "algorithm.initializer" in the same TrainingInfoProto.
* See "update_binding" below for details.
* By default, this field is empty and no initializer would be changed
* by the execution of "initialization".
*
*
* repeated .onnx.StringStringEntryProto initialization_binding = 3;
*/
// Replaces the entry at the given index with a pre-built message; rejects null.
public Builder setInitializationBinding(
int index, onnx.OnnxMl.StringStringEntryProto value) {
if (initializationBindingBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
// Copy-on-write before mutating the backing list.
ensureInitializationBindingIsMutable();
initializationBinding_.set(index, value);
onChanged();
} else {
initializationBindingBuilder_.setMessage(index, value);
}
return this;
}
/**
*
* This field specifies the bindings from the outputs of "initialization" to
* some initializers in "ModelProto.graph.initializer" and
* the "algorithm.initializer" in the same TrainingInfoProto.
* See "update_binding" below for details.
* By default, this field is empty and no initializer would be changed
* by the execution of "initialization".
*
*
* repeated .onnx.StringStringEntryProto initialization_binding = 3;
*/
// Overload taking a sub-builder: the builder is built immediately and the
// resulting message replaces the entry at the given index.
public Builder setInitializationBinding(
int index, onnx.OnnxMl.StringStringEntryProto.Builder builderForValue) {
if (initializationBindingBuilder_ == null) {
ensureInitializationBindingIsMutable();
initializationBinding_.set(index, builderForValue.build());
onChanged();
} else {
initializationBindingBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
* This field specifies the bindings from the outputs of "initialization" to
* some initializers in "ModelProto.graph.initializer" and
* the "algorithm.initializer" in the same TrainingInfoProto.
* See "update_binding" below for details.
* By default, this field is empty and no initializer would be changed
* by the execution of "initialization".
*
*
* repeated .onnx.StringStringEntryProto initialization_binding = 3;
*/
// Appends a pre-built entry to "initialization_binding"; rejects null.
public Builder addInitializationBinding(onnx.OnnxMl.StringStringEntryProto value) {
if (initializationBindingBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureInitializationBindingIsMutable();
initializationBinding_.add(value);
onChanged();
} else {
initializationBindingBuilder_.addMessage(value);
}
return this;
}
/**
*
* This field specifies the bindings from the outputs of "initialization" to
* some initializers in "ModelProto.graph.initializer" and
* the "algorithm.initializer" in the same TrainingInfoProto.
* See "update_binding" below for details.
* By default, this field is empty and no initializer would be changed
* by the execution of "initialization".
*
*
* repeated .onnx.StringStringEntryProto initialization_binding = 3;
*/
// Inserts a pre-built entry at the given index, shifting later entries; rejects null.
public Builder addInitializationBinding(
int index, onnx.OnnxMl.StringStringEntryProto value) {
if (initializationBindingBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureInitializationBindingIsMutable();
initializationBinding_.add(index, value);
onChanged();
} else {
initializationBindingBuilder_.addMessage(index, value);
}
return this;
}
/**
*
* This field specifies the bindings from the outputs of "initialization" to
* some initializers in "ModelProto.graph.initializer" and
* the "algorithm.initializer" in the same TrainingInfoProto.
* See "update_binding" below for details.
* By default, this field is empty and no initializer would be changed
* by the execution of "initialization".
*
*
* repeated .onnx.StringStringEntryProto initialization_binding = 3;
*/
// Appends the result of building the supplied sub-builder.
public Builder addInitializationBinding(
onnx.OnnxMl.StringStringEntryProto.Builder builderForValue) {
if (initializationBindingBuilder_ == null) {
ensureInitializationBindingIsMutable();
initializationBinding_.add(builderForValue.build());
onChanged();
} else {
initializationBindingBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
* This field specifies the bindings from the outputs of "initialization" to
* some initializers in "ModelProto.graph.initializer" and
* the "algorithm.initializer" in the same TrainingInfoProto.
* See "update_binding" below for details.
* By default, this field is empty and no initializer would be changed
* by the execution of "initialization".
*
*
* repeated .onnx.StringStringEntryProto initialization_binding = 3;
*/
// Inserts the result of building the supplied sub-builder at the given index.
public Builder addInitializationBinding(
int index, onnx.OnnxMl.StringStringEntryProto.Builder builderForValue) {
if (initializationBindingBuilder_ == null) {
ensureInitializationBindingIsMutable();
initializationBinding_.add(index, builderForValue.build());
onChanged();
} else {
initializationBindingBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
* This field specifies the bindings from the outputs of "initialization" to
* some initializers in "ModelProto.graph.initializer" and
* the "algorithm.initializer" in the same TrainingInfoProto.
* See "update_binding" below for details.
* By default, this field is empty and no initializer would be changed
* by the execution of "initialization".
*
*
* repeated .onnx.StringStringEntryProto initialization_binding = 3;
*/
// Bulk-appends entries from the given iterable; generic bound restored (the
// extracted text had a raw java.lang.Iterable, which protoc never emits).
public Builder addAllInitializationBinding(
java.lang.Iterable<? extends onnx.OnnxMl.StringStringEntryProto> values) {
if (initializationBindingBuilder_ == null) {
ensureInitializationBindingIsMutable();
org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
values, initializationBinding_);
onChanged();
} else {
initializationBindingBuilder_.addAllMessages(values);
}
return this;
}
/**
*
* This field specifies the bindings from the outputs of "initialization" to
* some initializers in "ModelProto.graph.initializer" and
* the "algorithm.initializer" in the same TrainingInfoProto.
* See "update_binding" below for details.
* By default, this field is empty and no initializer would be changed
* by the execution of "initialization".
*
*
* repeated .onnx.StringStringEntryProto initialization_binding = 3;
*/
// Removes all "initialization_binding" entries and clears the mutable-copy bit.
public Builder clearInitializationBinding() {
if (initializationBindingBuilder_ == null) {
initializationBinding_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
initializationBindingBuilder_.clear();
}
return this;
}
/**
*
* This field specifies the bindings from the outputs of "initialization" to
* some initializers in "ModelProto.graph.initializer" and
* the "algorithm.initializer" in the same TrainingInfoProto.
* See "update_binding" below for details.
* By default, this field is empty and no initializer would be changed
* by the execution of "initialization".
*
*
* repeated .onnx.StringStringEntryProto initialization_binding = 3;
*/
// Removes the entry at the given index, shifting later entries left.
public Builder removeInitializationBinding(int index) {
if (initializationBindingBuilder_ == null) {
ensureInitializationBindingIsMutable();
initializationBinding_.remove(index);
onChanged();
} else {
initializationBindingBuilder_.remove(index);
}
return this;
}
/**
*
* This field specifies the bindings from the outputs of "initialization" to
* some initializers in "ModelProto.graph.initializer" and
* the "algorithm.initializer" in the same TrainingInfoProto.
* See "update_binding" below for details.
* By default, this field is empty and no initializer would be changed
* by the execution of "initialization".
*
*
* repeated .onnx.StringStringEntryProto initialization_binding = 3;
*/
// Returns a mutable sub-builder for the entry at the given index,
// forcing creation of the repeated-field builder machinery.
public onnx.OnnxMl.StringStringEntryProto.Builder getInitializationBindingBuilder(
int index) {
return getInitializationBindingFieldBuilder().getBuilder(index);
}
/**
*
* This field specifies the bindings from the outputs of "initialization" to
* some initializers in "ModelProto.graph.initializer" and
* the "algorithm.initializer" in the same TrainingInfoProto.
* See "update_binding" below for details.
* By default, this field is empty and no initializer would be changed
* by the execution of "initialization".
*
*
* repeated .onnx.StringStringEntryProto initialization_binding = 3;
*/
// Read-only view of the entry at the given index, without forcing builder creation.
public onnx.OnnxMl.StringStringEntryProtoOrBuilder getInitializationBindingOrBuilder(
int index) {
if (initializationBindingBuilder_ == null) {
return initializationBinding_.get(index); } else {
return initializationBindingBuilder_.getMessageOrBuilder(index);
}
}
/**
*
* This field specifies the bindings from the outputs of "initialization" to
* some initializers in "ModelProto.graph.initializer" and
* the "algorithm.initializer" in the same TrainingInfoProto.
* See "update_binding" below for details.
* By default, this field is empty and no initializer would be changed
* by the execution of "initialization".
*
*
* repeated .onnx.StringStringEntryProto initialization_binding = 3;
*/
// Read-only views of all entries; generic return type restored (the extracted
// text had a raw java.util.List — protoc emits List<? extends ...OrBuilder>).
public java.util.List<? extends onnx.OnnxMl.StringStringEntryProtoOrBuilder>
getInitializationBindingOrBuilderList() {
if (initializationBindingBuilder_ != null) {
return initializationBindingBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(initializationBinding_);
}
}
/**
*
* This field specifies the bindings from the outputs of "initialization" to
* some initializers in "ModelProto.graph.initializer" and
* the "algorithm.initializer" in the same TrainingInfoProto.
* See "update_binding" below for details.
* By default, this field is empty and no initializer would be changed
* by the execution of "initialization".
*
*
* repeated .onnx.StringStringEntryProto initialization_binding = 3;
*/
// Appends a new default-valued entry and returns its mutable sub-builder.
public onnx.OnnxMl.StringStringEntryProto.Builder addInitializationBindingBuilder() {
return getInitializationBindingFieldBuilder().addBuilder(
onnx.OnnxMl.StringStringEntryProto.getDefaultInstance());
}
/**
*
* This field specifies the bindings from the outputs of "initialization" to
* some initializers in "ModelProto.graph.initializer" and
* the "algorithm.initializer" in the same TrainingInfoProto.
* See "update_binding" below for details.
* By default, this field is empty and no initializer would be changed
* by the execution of "initialization".
*
*
* repeated .onnx.StringStringEntryProto initialization_binding = 3;
*/
// Inserts a new default-valued entry at the given index and returns its sub-builder.
public onnx.OnnxMl.StringStringEntryProto.Builder addInitializationBindingBuilder(
int index) {
return getInitializationBindingFieldBuilder().addBuilder(
index, onnx.OnnxMl.StringStringEntryProto.getDefaultInstance());
}
/**
*
* This field specifies the bindings from the outputs of "initialization" to
* some initializers in "ModelProto.graph.initializer" and
* the "algorithm.initializer" in the same TrainingInfoProto.
* See "update_binding" below for details.
* By default, this field is empty and no initializer would be changed
* by the execution of "initialization".
*
*
* repeated .onnx.StringStringEntryProto initialization_binding = 3;
*/
// Mutable sub-builders for all entries; generic return type restored (the
// extracted text had a raw java.util.List).
public java.util.List<onnx.OnnxMl.StringStringEntryProto.Builder>
getInitializationBindingBuilderList() {
return getInitializationBindingFieldBuilder().getBuilderList();
}
// Lazily constructs the RepeatedFieldBuilderV3 for "initialization_binding";
// once created, the backing list is nulled because the builder owns the data.
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
onnx.OnnxMl.StringStringEntryProto, onnx.OnnxMl.StringStringEntryProto.Builder, onnx.OnnxMl.StringStringEntryProtoOrBuilder>
getInitializationBindingFieldBuilder() {
if (initializationBindingBuilder_ == null) {
initializationBindingBuilder_ = new org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
onnx.OnnxMl.StringStringEntryProto, onnx.OnnxMl.StringStringEntryProto.Builder, onnx.OnnxMl.StringStringEntryProtoOrBuilder>(
initializationBinding_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
initializationBinding_ = null;
}
return initializationBindingBuilder_;
}
// Backing storage for the repeated "update_binding" field (field 4).
// Generic type parameters restored — the extracted text had raw List/ArrayList.
private java.util.List<onnx.OnnxMl.StringStringEntryProto> updateBinding_ =
java.util.Collections.emptyList();
// Copy-on-write guard: bit 0x00000002 of bitField0_ records whether the list is
// already a private mutable copy owned by this builder.
private void ensureUpdateBindingIsMutable() {
if (!((bitField0_ & 0x00000002) != 0)) {
updateBinding_ = new java.util.ArrayList<onnx.OnnxMl.StringStringEntryProto>(updateBinding_);
bitField0_ |= 0x00000002;
}
}
// Lazily-created repeated-field builder; null until first accessor that needs it.
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
onnx.OnnxMl.StringStringEntryProto, onnx.OnnxMl.StringStringEntryProto.Builder, onnx.OnnxMl.StringStringEntryProtoOrBuilder> updateBindingBuilder_;
/**
*
* Gradient-based training is usually an iterative procedure. In one gradient
* descent iteration, we apply
* x = x - r * g
* where "x" is the optimized tensor, "r" stands for learning rate, and "g" is
* gradient of "x" with respect to a chosen loss. To avoid adding assignments
* into the training graph, we split the update equation into
* y = x - r * g
* x = y
* The user needs to save "y = x - r * g" into TrainingInfoProto.algorithm. To
* tell that "y" should be assigned to "x", the field "update_binding" may
* contain a key-value pair of strings, "x" (key of StringStringEntryProto)
* and "y" (value of StringStringEntryProto).
* For a neural network with multiple trainable (mutable) tensors, there can
* be multiple key-value pairs in "update_binding".
* The initializers appears as keys in "update_binding" are considered
* mutable variables. This implies some behaviors
* as described below.
* 1. We have only unique keys in all "update_binding"s so that two
* variables may not have the same name. This ensures that one
* variable is assigned up to once.
* 2. The keys must appear in names of "ModelProto.graph.initializer" or
* "TrainingInfoProto.algorithm.initializer".
* 3. The values must be output names of "algorithm" or "ModelProto.graph.output".
* 4. Mutable variables are initialized to the value specified by the
* corresponding initializer, and then potentially updated by
* "initializer_binding"s and "update_binding"s in "TrainingInfoProto"s.
* This field usually contains names of trainable tensors
* (in ModelProto.graph), optimizer states such as momentums in advanced
* stochastic gradient methods (in TrainingInfoProto.graph),
* and number of training iterations (in TrainingInfoProto.graph).
* By default, this field is empty and no initializer would be changed
* by the execution of "algorithm".
*
*
* repeated .onnx.StringStringEntryProto update_binding = 4;
*/
// Returns the "update_binding" entries; generic return type restored
// (raw java.util.List in the extracted text).
public java.util.List<onnx.OnnxMl.StringStringEntryProto> getUpdateBindingList() {
if (updateBindingBuilder_ == null) {
return java.util.Collections.unmodifiableList(updateBinding_);
} else {
return updateBindingBuilder_.getMessageList();
}
}
/**
*
* Gradient-based training is usually an iterative procedure. In one gradient
* descent iteration, we apply
* x = x - r * g
* where "x" is the optimized tensor, "r" stands for learning rate, and "g" is
* gradient of "x" with respect to a chosen loss. To avoid adding assignments
* into the training graph, we split the update equation into
* y = x - r * g
* x = y
* The user needs to save "y = x - r * g" into TrainingInfoProto.algorithm. To
* tell that "y" should be assigned to "x", the field "update_binding" may
* contain a key-value pair of strings, "x" (key of StringStringEntryProto)
* and "y" (value of StringStringEntryProto).
* For a neural network with multiple trainable (mutable) tensors, there can
* be multiple key-value pairs in "update_binding".
* The initializers appears as keys in "update_binding" are considered
* mutable variables. This implies some behaviors
* as described below.
* 1. We have only unique keys in all "update_binding"s so that two
* variables may not have the same name. This ensures that one
* variable is assigned up to once.
* 2. The keys must appear in names of "ModelProto.graph.initializer" or
* "TrainingInfoProto.algorithm.initializer".
* 3. The values must be output names of "algorithm" or "ModelProto.graph.output".
* 4. Mutable variables are initialized to the value specified by the
* corresponding initializer, and then potentially updated by
* "initializer_binding"s and "update_binding"s in "TrainingInfoProto"s.
* This field usually contains names of trainable tensors
* (in ModelProto.graph), optimizer states such as momentums in advanced
* stochastic gradient methods (in TrainingInfoProto.graph),
* and number of training iterations (in TrainingInfoProto.graph).
* By default, this field is empty and no initializer would be changed
* by the execution of "algorithm".
*
*
* repeated .onnx.StringStringEntryProto update_binding = 4;
*/
// Number of "update_binding" entries, from whichever store is active.
public int getUpdateBindingCount() {
if (updateBindingBuilder_ == null) {
return updateBinding_.size();
} else {
return updateBindingBuilder_.getCount();
}
}
/**
*
* Gradient-based training is usually an iterative procedure. In one gradient
* descent iteration, we apply
* x = x - r * g
* where "x" is the optimized tensor, "r" stands for learning rate, and "g" is
* gradient of "x" with respect to a chosen loss. To avoid adding assignments
* into the training graph, we split the update equation into
* y = x - r * g
* x = y
* The user needs to save "y = x - r * g" into TrainingInfoProto.algorithm. To
* tell that "y" should be assigned to "x", the field "update_binding" may
* contain a key-value pair of strings, "x" (key of StringStringEntryProto)
* and "y" (value of StringStringEntryProto).
* For a neural network with multiple trainable (mutable) tensors, there can
* be multiple key-value pairs in "update_binding".
* The initializers appears as keys in "update_binding" are considered
* mutable variables. This implies some behaviors
* as described below.
* 1. We have only unique keys in all "update_binding"s so that two
* variables may not have the same name. This ensures that one
* variable is assigned up to once.
* 2. The keys must appear in names of "ModelProto.graph.initializer" or
* "TrainingInfoProto.algorithm.initializer".
* 3. The values must be output names of "algorithm" or "ModelProto.graph.output".
* 4. Mutable variables are initialized to the value specified by the
* corresponding initializer, and then potentially updated by
* "initializer_binding"s and "update_binding"s in "TrainingInfoProto"s.
* This field usually contains names of trainable tensors
* (in ModelProto.graph), optimizer states such as momentums in advanced
* stochastic gradient methods (in TrainingInfoProto.graph),
* and number of training iterations (in TrainingInfoProto.graph).
* By default, this field is empty and no initializer would be changed
* by the execution of "algorithm".
*
*
* repeated .onnx.StringStringEntryProto update_binding = 4;
*/
// Returns the "update_binding" entry at the given index.
public onnx.OnnxMl.StringStringEntryProto getUpdateBinding(int index) {
if (updateBindingBuilder_ == null) {
return updateBinding_.get(index);
} else {
return updateBindingBuilder_.getMessage(index);
}
}
/**
*
* Gradient-based training is usually an iterative procedure. In one gradient
* descent iteration, we apply
* x = x - r * g
* where "x" is the optimized tensor, "r" stands for learning rate, and "g" is
* gradient of "x" with respect to a chosen loss. To avoid adding assignments
* into the training graph, we split the update equation into
* y = x - r * g
* x = y
* The user needs to save "y = x - r * g" into TrainingInfoProto.algorithm. To
* tell that "y" should be assigned to "x", the field "update_binding" may
* contain a key-value pair of strings, "x" (key of StringStringEntryProto)
* and "y" (value of StringStringEntryProto).
* For a neural network with multiple trainable (mutable) tensors, there can
* be multiple key-value pairs in "update_binding".
* The initializers appears as keys in "update_binding" are considered
* mutable variables. This implies some behaviors
* as described below.
* 1. We have only unique keys in all "update_binding"s so that two
* variables may not have the same name. This ensures that one
* variable is assigned up to once.
* 2. The keys must appear in names of "ModelProto.graph.initializer" or
* "TrainingInfoProto.algorithm.initializer".
* 3. The values must be output names of "algorithm" or "ModelProto.graph.output".
* 4. Mutable variables are initialized to the value specified by the
* corresponding initializer, and then potentially updated by
* "initializer_binding"s and "update_binding"s in "TrainingInfoProto"s.
* This field usually contains names of trainable tensors
* (in ModelProto.graph), optimizer states such as momentums in advanced
* stochastic gradient methods (in TrainingInfoProto.graph),
* and number of training iterations (in TrainingInfoProto.graph).
* By default, this field is empty and no initializer would be changed
* by the execution of "algorithm".
*
*
* repeated .onnx.StringStringEntryProto update_binding = 4;
*/
// Replaces the "update_binding" entry at the given index with a pre-built
// message; rejects null.
public Builder setUpdateBinding(
int index, onnx.OnnxMl.StringStringEntryProto value) {
if (updateBindingBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
// Copy-on-write before mutating the backing list.
ensureUpdateBindingIsMutable();
updateBinding_.set(index, value);
onChanged();
} else {
updateBindingBuilder_.setMessage(index, value);
}
return this;
}
/**
*
* Gradient-based training is usually an iterative procedure. In one gradient
* descent iteration, we apply
* x = x - r * g
* where "x" is the optimized tensor, "r" stands for learning rate, and "g" is
* gradient of "x" with respect to a chosen loss. To avoid adding assignments
* into the training graph, we split the update equation into
* y = x - r * g
* x = y
* The user needs to save "y = x - r * g" into TrainingInfoProto.algorithm. To
* tell that "y" should be assigned to "x", the field "update_binding" may
* contain a key-value pair of strings, "x" (key of StringStringEntryProto)
* and "y" (value of StringStringEntryProto).
* For a neural network with multiple trainable (mutable) tensors, there can
* be multiple key-value pairs in "update_binding".
* The initializers appears as keys in "update_binding" are considered
* mutable variables. This implies some behaviors
* as described below.
* 1. We have only unique keys in all "update_binding"s so that two
* variables may not have the same name. This ensures that one
* variable is assigned up to once.
* 2. The keys must appear in names of "ModelProto.graph.initializer" or
* "TrainingInfoProto.algorithm.initializer".
* 3. The values must be output names of "algorithm" or "ModelProto.graph.output".
* 4. Mutable variables are initialized to the value specified by the
* corresponding initializer, and then potentially updated by
* "initializer_binding"s and "update_binding"s in "TrainingInfoProto"s.
* This field usually contains names of trainable tensors
* (in ModelProto.graph), optimizer states such as momentums in advanced
* stochastic gradient methods (in TrainingInfoProto.graph),
* and number of training iterations (in TrainingInfoProto.graph).
* By default, this field is empty and no initializer would be changed
* by the execution of "algorithm".
*
*
* repeated .onnx.StringStringEntryProto update_binding = 4;
*/
// Overload taking a sub-builder: built immediately, result replaces the entry
// at the given index.
public Builder setUpdateBinding(
int index, onnx.OnnxMl.StringStringEntryProto.Builder builderForValue) {
if (updateBindingBuilder_ == null) {
ensureUpdateBindingIsMutable();
updateBinding_.set(index, builderForValue.build());
onChanged();
} else {
updateBindingBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * Appends one assignment pair to {@code update_binding}: the entry's key
 * names a mutable initializer "x" and its value names an output "y" of
 * "algorithm" (or of ModelProto.graph) that is assigned back to "x" after
 * one training iteration. Keys must be unique across all bindings.
 *
 * repeated .onnx.StringStringEntryProto update_binding = 4;
 *
 * @param value the entry to append; must not be null
 * @return this builder, for chaining
 * @throws NullPointerException if {@code value} is null
 */
public Builder addUpdateBinding(onnx.OnnxMl.StringStringEntryProto value) {
  if (updateBindingBuilder_ != null) {
    // Field builder owns the list; delegate directly.
    updateBindingBuilder_.addMessage(value);
    return this;
  }
  if (value == null) {
    throw new NullPointerException();
  }
  ensureUpdateBindingIsMutable();
  updateBinding_.add(value);
  onChanged();
  return this;
}
/**
 * Inserts one assignment pair into {@code update_binding} at the given
 * position. Each entry binds a mutable initializer name (key) to the
 * "algorithm"/graph output name (value) assigned to it after a training
 * iteration; keys must be unique across all bindings.
 *
 * repeated .onnx.StringStringEntryProto update_binding = 4;
 *
 * @param index position at which to insert
 * @param value the entry to insert; must not be null
 * @return this builder, for chaining
 * @throws NullPointerException if {@code value} is null
 */
public Builder addUpdateBinding(
    int index, onnx.OnnxMl.StringStringEntryProto value) {
  if (updateBindingBuilder_ != null) {
    // Field builder owns the list; delegate directly.
    updateBindingBuilder_.addMessage(index, value);
    return this;
  }
  if (value == null) {
    throw new NullPointerException();
  }
  ensureUpdateBindingIsMutable();
  updateBinding_.add(index, value);
  onChanged();
  return this;
}
/**
 * Appends one assignment pair to {@code update_binding}, built from the
 * supplied sub-builder. Each entry binds a mutable initializer name (key)
 * to the "algorithm"/graph output name (value) assigned to it after a
 * training iteration.
 *
 * repeated .onnx.StringStringEntryProto update_binding = 4;
 *
 * @param builderForValue builder whose {@code build()} result is appended
 * @return this builder, for chaining
 */
public Builder addUpdateBinding(
    onnx.OnnxMl.StringStringEntryProto.Builder builderForValue) {
  onnx.OnnxMl.StringStringEntryProto built = builderForValue.build();
  if (updateBindingBuilder_ != null) {
    updateBindingBuilder_.addMessage(built);
  } else {
    ensureUpdateBindingIsMutable();
    updateBinding_.add(built);
    onChanged();
  }
  return this;
}
/**
 * Inserts one assignment pair into {@code update_binding} at the given
 * position, built from the supplied sub-builder. Each entry binds a mutable
 * initializer name (key) to the "algorithm"/graph output name (value)
 * assigned to it after a training iteration.
 *
 * repeated .onnx.StringStringEntryProto update_binding = 4;
 *
 * @param index position at which to insert
 * @param builderForValue builder whose {@code build()} result is inserted
 * @return this builder, for chaining
 */
public Builder addUpdateBinding(
    int index, onnx.OnnxMl.StringStringEntryProto.Builder builderForValue) {
  onnx.OnnxMl.StringStringEntryProto built = builderForValue.build();
  if (updateBindingBuilder_ != null) {
    updateBindingBuilder_.addMessage(index, built);
  } else {
    ensureUpdateBindingIsMutable();
    updateBinding_.add(index, built);
    onChanged();
  }
  return this;
}
/**
 * Appends every entry from {@code values} to {@code update_binding}.
 * Each entry binds a mutable initializer name (key) to the
 * "algorithm"/graph output name (value) assigned to it after a training
 * iteration; keys must be unique across all bindings.
 *
 * repeated .onnx.StringStringEntryProto update_binding = 4;
 *
 * NOTE(review): the element type parameter of {@code Iterable} was lost in
 * extraction (raw type); restored to the standard generated signature
 * {@code Iterable<? extends StringStringEntryProto>}, which is
 * erasure-compatible with the raw original.
 *
 * @param values entries to append; must not be null or contain nulls
 * @return this builder, for chaining
 */
public Builder addAllUpdateBinding(
    java.lang.Iterable<? extends onnx.OnnxMl.StringStringEntryProto> values) {
  if (updateBindingBuilder_ == null) {
    ensureUpdateBindingIsMutable();
    org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
        values, updateBinding_);
    onChanged();
  } else {
    updateBindingBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 * Removes every entry from {@code update_binding}, restoring the default
 * (empty) state in which no initializer is changed by the execution of
 * "algorithm".
 *
 * repeated .onnx.StringStringEntryProto update_binding = 4;
 *
 * @return this builder, for chaining
 */
public Builder clearUpdateBinding() {
  if (updateBindingBuilder_ != null) {
    updateBindingBuilder_.clear();
    return this;
  }
  updateBinding_ = java.util.Collections.emptyList();
  // Clear the "locally modified" bit for this field.
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}
/**
 * Removes the {@code update_binding} entry at the given position.
 *
 * repeated .onnx.StringStringEntryProto update_binding = 4;
 *
 * @param index position of the entry to remove
 * @return this builder, for chaining
 */
public Builder removeUpdateBinding(int index) {
  if (updateBindingBuilder_ != null) {
    updateBindingBuilder_.remove(index);
    return this;
  }
  ensureUpdateBindingIsMutable();
  updateBinding_.remove(index);
  onChanged();
  return this;
}
/**
 * Returns a mutable sub-builder for the {@code update_binding} entry at
 * the given position. Forces the repeated-field builder into existence,
 * so subsequent access goes through it.
 *
 * repeated .onnx.StringStringEntryProto update_binding = 4;
 *
 * @param index position of the entry
 * @return a builder backed by the entry at {@code index}
 */
public onnx.OnnxMl.StringStringEntryProto.Builder getUpdateBindingBuilder(
    int index) {
  org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
      onnx.OnnxMl.StringStringEntryProto,
      onnx.OnnxMl.StringStringEntryProto.Builder,
      onnx.OnnxMl.StringStringEntryProtoOrBuilder> fieldBuilder =
          getUpdateBindingFieldBuilder();
  return fieldBuilder.getBuilder(index);
}
/**
 * Returns the {@code update_binding} entry at the given position as a
 * read-only {@code OrBuilder} view, without forcing the repeated-field
 * builder into existence.
 *
 * repeated .onnx.StringStringEntryProto update_binding = 4;
 *
 * @param index position of the entry
 * @return message or builder view of the entry at {@code index}
 */
public onnx.OnnxMl.StringStringEntryProtoOrBuilder getUpdateBindingOrBuilder(
    int index) {
  if (updateBindingBuilder_ != null) {
    return updateBindingBuilder_.getMessageOrBuilder(index);
  }
  return updateBinding_.get(index);
}
/**
 * Returns a read-only view of all {@code update_binding} entries as
 * {@code OrBuilder}s, without forcing the repeated-field builder into
 * existence.
 *
 * repeated .onnx.StringStringEntryProto update_binding = 4;
 *
 * NOTE(review): the element type parameter of {@code List} was lost in
 * extraction (raw type); restored to the standard generated signature
 * {@code List<? extends StringStringEntryProtoOrBuilder>}, which is
 * erasure-compatible with the raw original.
 *
 * @return unmodifiable view of the current entries
 */
public java.util.List<? extends onnx.OnnxMl.StringStringEntryProtoOrBuilder>
     getUpdateBindingOrBuilderList() {
  if (updateBindingBuilder_ != null) {
    return updateBindingBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(updateBinding_);
  }
}
/**
 * Appends a new default-valued {@code update_binding} entry and returns
 * its sub-builder for in-place editing. Forces the repeated-field builder
 * into existence.
 *
 * repeated .onnx.StringStringEntryProto update_binding = 4;
 *
 * @return builder for the newly appended entry
 */
public onnx.OnnxMl.StringStringEntryProto.Builder addUpdateBindingBuilder() {
  onnx.OnnxMl.StringStringEntryProto defaultEntry =
      onnx.OnnxMl.StringStringEntryProto.getDefaultInstance();
  return getUpdateBindingFieldBuilder().addBuilder(defaultEntry);
}
/**
 * Inserts a new default-valued {@code update_binding} entry at the given
 * position and returns its sub-builder for in-place editing. Forces the
 * repeated-field builder into existence.
 *
 * repeated .onnx.StringStringEntryProto update_binding = 4;
 *
 * @param index position at which to insert
 * @return builder for the newly inserted entry
 */
public onnx.OnnxMl.StringStringEntryProto.Builder addUpdateBindingBuilder(
    int index) {
  onnx.OnnxMl.StringStringEntryProto defaultEntry =
      onnx.OnnxMl.StringStringEntryProto.getDefaultInstance();
  return getUpdateBindingFieldBuilder().addBuilder(index, defaultEntry);
}
/**
 * Returns the list of sub-builders for all {@code update_binding} entries.
 * Forces the repeated-field builder into existence.
 *
 * repeated .onnx.StringStringEntryProto update_binding = 4;
 *
 * NOTE(review): the element type parameter of {@code List} was lost in
 * extraction (raw type); restored to the standard generated signature
 * {@code List<StringStringEntryProto.Builder>}, which is
 * erasure-compatible with the raw original.
 *
 * @return builders backed by the current entries
 */
public java.util.List<onnx.OnnxMl.StringStringEntryProto.Builder>
     getUpdateBindingBuilderList() {
  return getUpdateBindingFieldBuilder().getBuilderList();
}
/**
 * Lazily creates the {@code RepeatedFieldBuilderV3} for update_binding.
 * On first call, the accumulated plain list is handed to the new field
 * builder (the 0x00000002 hasBit tells it whether the list was locally
 * modified) and the plain reference is nulled so that all later access
 * goes through the builder exclusively.
 */
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
onnx.OnnxMl.StringStringEntryProto, onnx.OnnxMl.StringStringEntryProto.Builder, onnx.OnnxMl.StringStringEntryProtoOrBuilder>
getUpdateBindingFieldBuilder() {
if (updateBindingBuilder_ == null) {
updateBindingBuilder_ = new org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
onnx.OnnxMl.StringStringEntryProto, onnx.OnnxMl.StringStringEntryProto.Builder, onnx.OnnxMl.StringStringEntryProtoOrBuilder>(
updateBinding_,
((bitField0_ & 0x00000002) != 0),
getParentForChildren(),
isClean());
// Ownership transferred; force future access through the field builder.
updateBinding_ = null;
}
return updateBindingBuilder_;
}
// Replaces this builder's unknown-field set; delegates to the base builder.
@java.lang.Override
public final Builder setUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
// Merges the given unknown fields into this builder's set; delegates to the base builder.
@java.lang.Override
public final Builder mergeUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:onnx.TrainingInfoProto)
}
// @@protoc_insertion_point(class_scope:onnx.TrainingInfoProto)
// Shared singleton default instance, created eagerly at class load.
private static final onnx.OnnxMl.TrainingInfoProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new onnx.OnnxMl.TrainingInfoProto();
}
/** Returns the shared singleton default instance of {@code TrainingInfoProto}. */
public static onnx.OnnxMl.TrainingInfoProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
/**
 * Parser singleton for {@code TrainingInfoProto}; delegates to the
 * generated stream-parsing constructor.
 *
 * NOTE(review): the type parameters of {@code Parser}/{@code AbstractParser}
 * were lost in extraction (raw types, which break the covariant override of
 * {@code parsePartialFrom}); restored to the standard generated signature.
 */
private static final org.nd4j.shade.protobuf.Parser<TrainingInfoProto>
    PARSER = new org.nd4j.shade.protobuf.AbstractParser<TrainingInfoProto>() {
  @java.lang.Override
  public TrainingInfoProto parsePartialFrom(
      org.nd4j.shade.protobuf.CodedInputStream input,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    return new TrainingInfoProto(input, extensionRegistry);
  }
};
/**
 * Returns the shared parser for {@code TrainingInfoProto}.
 *
 * NOTE(review): the type parameter of {@code Parser} was lost in extraction
 * (raw type); restored to the standard generated signature.
 */
public static org.nd4j.shade.protobuf.Parser<TrainingInfoProto> parser() {
  return PARSER;
}
/**
 * Returns the shared parser for this message type.
 *
 * NOTE(review): the type parameter of {@code Parser} was lost in extraction
 * (raw type); restored to the standard generated signature.
 */
@java.lang.Override
public org.nd4j.shade.protobuf.Parser<TrainingInfoProto> getParserForType() {
  return PARSER;
}
/** Returns the shared singleton default instance of this message type. */
@java.lang.Override
public onnx.OnnxMl.TrainingInfoProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface ModelProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:onnx.ModelProto)
org.nd4j.shade.protobuf.MessageOrBuilder {
/**
*
* The version of the IR this model targets. See Version enum above.
* This field MUST be present.
*
*
* int64 ir_version = 1;
* @return The irVersion.
*/
long getIrVersion();
/**
 * The OperatorSets this model relies on.
 * All ModelProtos MUST have at least one entry that
 * specifies which version of the ONNX OperatorSet is
 * being imported.
 * All nodes in the ModelProto's graph will bind against the operator
 * with the same-domain/same-op_type operator with the HIGHEST version
 * in the referenced operator sets.
 *
 * repeated .onnx.OperatorSetIdProto opset_import = 8;
 *
 * NOTE(review): the element type parameter of {@code List} was lost in
 * extraction (raw type); restored to the standard generated signature.
 */
java.util.List<onnx.OnnxMl.OperatorSetIdProto>
    getOpsetImportList();
/**
*
* The OperatorSets this model relies on.
* All ModelProtos MUST have at least one entry that
* specifies which version of the ONNX OperatorSet is
* being imported.
* All nodes in the ModelProto's graph will bind against the operator
* with the same-domain/same-op_type operator with the HIGHEST version
* in the referenced operator sets.
*
*
* repeated .onnx.OperatorSetIdProto opset_import = 8;
*/
onnx.OnnxMl.OperatorSetIdProto getOpsetImport(int index);
/**
*
* The OperatorSets this model relies on.
* All ModelProtos MUST have at least one entry that
* specifies which version of the ONNX OperatorSet is
* being imported.
* All nodes in the ModelProto's graph will bind against the operator
* with the same-domain/same-op_type operator with the HIGHEST version
* in the referenced operator sets.
*
*
* repeated .onnx.OperatorSetIdProto opset_import = 8;
*/
int getOpsetImportCount();
/**
 * The OperatorSets this model relies on.
 * All ModelProtos MUST have at least one entry that
 * specifies which version of the ONNX OperatorSet is
 * being imported.
 * All nodes in the ModelProto's graph will bind against the operator
 * with the same-domain/same-op_type operator with the HIGHEST version
 * in the referenced operator sets.
 *
 * repeated .onnx.OperatorSetIdProto opset_import = 8;
 *
 * NOTE(review): the element type parameter of {@code List} was lost in
 * extraction (raw type); restored to the standard generated signature.
 */
java.util.List<? extends onnx.OnnxMl.OperatorSetIdProtoOrBuilder>
    getOpsetImportOrBuilderList();
/**
*
* The OperatorSets this model relies on.
* All ModelProtos MUST have at least one entry that
* specifies which version of the ONNX OperatorSet is
* being imported.
* All nodes in the ModelProto's graph will bind against the operator
* with the same-domain/same-op_type operator with the HIGHEST version
* in the referenced operator sets.
*
*
* repeated .onnx.OperatorSetIdProto opset_import = 8;
*/
onnx.OnnxMl.OperatorSetIdProtoOrBuilder getOpsetImportOrBuilder(
int index);
/**
*
* The name of the framework or tool used to generate this model.
* This field SHOULD be present to indicate which implementation/tool/framework
* emitted the model.
*
*
* string producer_name = 2;
* @return The producerName.
*/
java.lang.String getProducerName();
/**
*
* The name of the framework or tool used to generate this model.
* This field SHOULD be present to indicate which implementation/tool/framework
* emitted the model.
*
*
* string producer_name = 2;
* @return The bytes for producerName.
*/
org.nd4j.shade.protobuf.ByteString
getProducerNameBytes();
/**
*
* The version of the framework or tool used to generate this model.
* This field SHOULD be present to indicate which implementation/tool/framework
* emitted the model.
*
*
* string producer_version = 3;
* @return The producerVersion.
*/
java.lang.String getProducerVersion();
/**
*
* The version of the framework or tool used to generate this model.
* This field SHOULD be present to indicate which implementation/tool/framework
* emitted the model.
*
*
* string producer_version = 3;
* @return The bytes for producerVersion.
*/
org.nd4j.shade.protobuf.ByteString
getProducerVersionBytes();
/**
*
* Domain name of the model.
* We use reverse domain names as name space indicators. For example:
* `com.facebook.fair` or `com.microsoft.cognitiveservices`
* Together with `model_version` and GraphProto.name, this forms the unique identity of
* the graph.
*
*
* string domain = 4;
* @return The domain.
*/
java.lang.String getDomain();
/**
*
* Domain name of the model.
* We use reverse domain names as name space indicators. For example:
* `com.facebook.fair` or `com.microsoft.cognitiveservices`
* Together with `model_version` and GraphProto.name, this forms the unique identity of
* the graph.
*
*
* string domain = 4;
* @return The bytes for domain.
*/
org.nd4j.shade.protobuf.ByteString
getDomainBytes();
/**
*
* The version of the graph encoded. See Version enum below.
*
*
* int64 model_version = 5;
* @return The modelVersion.
*/
long getModelVersion();
/**
*
* A human-readable documentation for this model. Markdown is allowed.
*
*
* string doc_string = 6;
* @return The docString.
*/
java.lang.String getDocString();
/**
*
* A human-readable documentation for this model. Markdown is allowed.
*
*
* string doc_string = 6;
* @return The bytes for docString.
*/
org.nd4j.shade.protobuf.ByteString
getDocStringBytes();
/**
*
* The parameterized graph that is evaluated to execute the model.
*
*
* .onnx.GraphProto graph = 7;
* @return Whether the graph field is set.
*/
boolean hasGraph();
/**
*
* The parameterized graph that is evaluated to execute the model.
*
*
* .onnx.GraphProto graph = 7;
* @return The graph.
*/
onnx.OnnxMl.GraphProto getGraph();
/**
*
* The parameterized graph that is evaluated to execute the model.
*
*
* .onnx.GraphProto graph = 7;
*/
onnx.OnnxMl.GraphProtoOrBuilder getGraphOrBuilder();
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
java.util.List
getMetadataPropsList();
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
onnx.OnnxMl.StringStringEntryProto getMetadataProps(int index);
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
int getMetadataPropsCount();
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
java.util.List
getMetadataPropsOrBuilderList();
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
onnx.OnnxMl.StringStringEntryProtoOrBuilder getMetadataPropsOrBuilder(
int index);
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
java.util.List
getTrainingInfoList();
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
onnx.OnnxMl.TrainingInfoProto getTrainingInfo(int index);
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
int getTrainingInfoCount();
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
java.util.List
getTrainingInfoOrBuilderList();
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
onnx.OnnxMl.TrainingInfoProtoOrBuilder getTrainingInfoOrBuilder(
int index);
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard opserator sets are given higher priotity or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
java.util.List
getFunctionsList();
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard opserator sets are given higher priotity or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
onnx.OnnxMl.FunctionProto getFunctions(int index);
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard opserator sets are given higher priotity or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
int getFunctionsCount();
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard opserator sets are given higher priotity or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
java.util.List
getFunctionsOrBuilderList();
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard opserator sets are given higher priotity or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
onnx.OnnxMl.FunctionProtoOrBuilder getFunctionsOrBuilder(
int index);
}
/**
*
* Models
* ModelProto is a top-level file/container format for bundling a ML model and
* associating its computation graph with metadata.
* The semantics of the model are described by the associated GraphProto's.
*
*
* Protobuf type {@code onnx.ModelProto}
*/
public static final class ModelProto extends
org.nd4j.shade.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:onnx.ModelProto)
ModelProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use ModelProto.newBuilder() to construct.
// Builder-based constructor; invoked only by ModelProto.newBuilder().build().
private ModelProto(org.nd4j.shade.protobuf.GeneratedMessageV3.Builder builder) {
  super(builder);
}
// Default-instance constructor: initializes strings to "" and every repeated
// field to the shared immutable empty list (proto3 default values).
private ModelProto() {
  opsetImport_ = java.util.Collections.emptyList();
  producerName_ = "";
  producerVersion_ = "";
  domain_ = "";
  docString_ = "";
  metadataProps_ = java.util.Collections.emptyList();
  trainingInfo_ = java.util.Collections.emptyList();
  functions_ = java.util.Collections.emptyList();
}
// Runtime hook used by the protobuf runtime to create fresh instances
// reflectively; the parameter exists only to disambiguate the overload.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
    UnusedPrivateParameter unused) {
  return new ModelProto();
}
// Returns the fields that were present on the wire but not recognized by
// this generated schema (preserved for round-tripping).
@java.lang.Override
public final org.nd4j.shade.protobuf.UnknownFieldSet
    getUnknownFields() {
  return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs off the stream until
// end of input (tag 0).  Each case label is the raw tag value,
// (field_number << 3) | wire_type — e.g. 66 == field 8 (opset_import) with
// wire type 2 (length-delimited).
private ModelProto(
    org.nd4j.shade.protobuf.CodedInputStream input,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  // Bit i set => the i-th repeated field has been switched from the shared
  // immutable empty list to a private mutable ArrayList.
  int mutable_bitField0_ = 0;
  org.nd4j.shade.protobuf.UnknownFieldSet.Builder unknownFields =
      org.nd4j.shade.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 8: {  // ir_version = 1, varint
          irVersion_ = input.readInt64();
          break;
        }
        case 18: {  // producer_name = 2, length-delimited string
          java.lang.String s = input.readStringRequireUtf8();
          producerName_ = s;
          break;
        }
        case 26: {  // producer_version = 3
          java.lang.String s = input.readStringRequireUtf8();
          producerVersion_ = s;
          break;
        }
        case 34: {  // domain = 4
          java.lang.String s = input.readStringRequireUtf8();
          domain_ = s;
          break;
        }
        case 40: {  // model_version = 5, varint
          modelVersion_ = input.readInt64();
          break;
        }
        case 50: {  // doc_string = 6
          java.lang.String s = input.readStringRequireUtf8();
          docString_ = s;
          break;
        }
        case 58: {  // graph = 7, embedded message
          onnx.OnnxMl.GraphProto.Builder subBuilder = null;
          if (graph_ != null) {
            // A repeated occurrence of a singular message field is merged
            // into the previous value (standard proto3 semantics).
            subBuilder = graph_.toBuilder();
          }
          graph_ = input.readMessage(onnx.OnnxMl.GraphProto.parser(), extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(graph_);
            graph_ = subBuilder.buildPartial();
          }
          break;
        }
        case 66: {  // opset_import = 8, repeated message
          if (!((mutable_bitField0_ & 0x00000001) != 0)) {
            opsetImport_ = new java.util.ArrayList();
            mutable_bitField0_ |= 0x00000001;
          }
          opsetImport_.add(
              input.readMessage(onnx.OnnxMl.OperatorSetIdProto.parser(), extensionRegistry));
          break;
        }
        case 114: {  // metadata_props = 14, repeated message
          if (!((mutable_bitField0_ & 0x00000002) != 0)) {
            metadataProps_ = new java.util.ArrayList();
            mutable_bitField0_ |= 0x00000002;
          }
          metadataProps_.add(
              input.readMessage(onnx.OnnxMl.StringStringEntryProto.parser(), extensionRegistry));
          break;
        }
        case 162: {  // training_info = 20, repeated message
          if (!((mutable_bitField0_ & 0x00000004) != 0)) {
            trainingInfo_ = new java.util.ArrayList();
            mutable_bitField0_ |= 0x00000004;
          }
          trainingInfo_.add(
              input.readMessage(onnx.OnnxMl.TrainingInfoProto.parser(), extensionRegistry));
          break;
        }
        case 202: {  // functions = 25, repeated message
          if (!((mutable_bitField0_ & 0x00000008) != 0)) {
            functions_ = new java.util.ArrayList();
            mutable_bitField0_ |= 0x00000008;
          }
          functions_.add(
              input.readMessage(onnx.OnnxMl.FunctionProto.parser(), extensionRegistry));
          break;
        }
        default: {
          // Unrecognized field number: stash the bytes in unknownFields;
          // parseUnknownField returns false at an end-group tag.
          if (!parseUnknownField(
              input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
      }
    }
  } catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (org.nd4j.shade.protobuf.UninitializedMessageException e) {
    throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new org.nd4j.shade.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    // Freeze any list that was made mutable during parsing so the message
    // stays deeply immutable after construction.
    if (((mutable_bitField0_ & 0x00000001) != 0)) {
      opsetImport_ = java.util.Collections.unmodifiableList(opsetImport_);
    }
    if (((mutable_bitField0_ & 0x00000002) != 0)) {
      metadataProps_ = java.util.Collections.unmodifiableList(metadataProps_);
    }
    if (((mutable_bitField0_ & 0x00000004) != 0)) {
      trainingInfo_ = java.util.Collections.unmodifiableList(trainingInfo_);
    }
    if (((mutable_bitField0_ & 0x00000008) != 0)) {
      functions_ = java.util.Collections.unmodifiableList(functions_);
    }
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Static accessor for the onnx.ModelProto message descriptor.
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return onnx.OnnxMl.internal_static_onnx_ModelProto_descriptor;
}
// Wires this message class and its Builder to the reflective field-accessor
// table generated for onnx.ModelProto.
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return onnx.OnnxMl.internal_static_onnx_ModelProto_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          onnx.OnnxMl.ModelProto.class, onnx.OnnxMl.ModelProto.Builder.class);
}
public static final int IR_VERSION_FIELD_NUMBER = 1;
private long irVersion_;
/**
 *
 * The version of the IR this model targets. See Version enum above.
 * This field MUST be present.
 *
 *
 * int64 ir_version = 1;
 * @return The irVersion.
 */
@java.lang.Override
public long getIrVersion() {
  // Plain field read; 0 means the field was absent on the wire (proto3).
  return irVersion_;
}
public static final int OPSET_IMPORT_FIELD_NUMBER = 8;
// Element type restored: the raw java.util.List was a generics-stripping
// artifact; protoc emits List<onnx.OnnxMl.OperatorSetIdProto> here.
private java.util.List<onnx.OnnxMl.OperatorSetIdProto> opsetImport_;
/**
 *
 * The OperatorSets this model relies on.
 * All ModelProtos MUST have at least one entry that
 * specifies which version of the ONNX OperatorSet is
 * being imported.
 * All nodes in the ModelProto's graph will bind against the operator
 * with the same-domain/same-op_type operator with the HIGHEST version
 * in the referenced operator sets.
 *
 *
 * <code>repeated .onnx.OperatorSetIdProto opset_import = 8;</code>
 */
@java.lang.Override
public java.util.List<onnx.OnnxMl.OperatorSetIdProto> getOpsetImportList() {
  return opsetImport_;
}
/**
 * Message-or-builder view of the same list; on an immutable message the
 * elements are always built messages, so the list is returned directly.
 *
 * <code>repeated .onnx.OperatorSetIdProto opset_import = 8;</code>
 */
@java.lang.Override
public java.util.List<? extends onnx.OnnxMl.OperatorSetIdProtoOrBuilder>
    getOpsetImportOrBuilderList() {
  return opsetImport_;
}
/**
 * Number of opset_import entries.
 *
 * <code>repeated .onnx.OperatorSetIdProto opset_import = 8;</code>
 */
@java.lang.Override
public int getOpsetImportCount() {
  return opsetImport_.size();
}
/**
 * Returns the opset_import entry at {@code index}.
 *
 * <code>repeated .onnx.OperatorSetIdProto opset_import = 8;</code>
 */
@java.lang.Override
public onnx.OnnxMl.OperatorSetIdProto getOpsetImport(int index) {
  return opsetImport_.get(index);
}
/**
 * Returns the opset_import entry at {@code index} as its OrBuilder view.
 *
 * <code>repeated .onnx.OperatorSetIdProto opset_import = 8;</code>
 */
@java.lang.Override
public onnx.OnnxMl.OperatorSetIdProtoOrBuilder getOpsetImportOrBuilder(
    int index) {
  return opsetImport_.get(index);
}
public static final int PRODUCER_NAME_FIELD_NUMBER = 2;
// Holds either a java.lang.String or a ByteString: the runtime caches
// whichever representation was requested last; volatile makes the cached
// swap safely visible across threads.
private volatile java.lang.Object producerName_;
/**
 *
 * The name of the framework or tool used to generate this model.
 * This field SHOULD be present to indicate which implementation/tool/framework
 * emitted the model.
 *
 *
 * string producer_name = 2;
 * @return The producerName.
 */
@java.lang.Override
public java.lang.String getProducerName() {
  java.lang.Object ref = producerName_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the UTF-8 bytes once and cache the String back into the field.
    org.nd4j.shade.protobuf.ByteString bs =
        (org.nd4j.shade.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    producerName_ = s;
    return s;
  }
}
/**
 *
 * The name of the framework or tool used to generate this model.
 * This field SHOULD be present to indicate which implementation/tool/framework
 * emitted the model.
 *
 *
 * string producer_name = 2;
 * @return The bytes for producerName.
 */
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString
    getProducerNameBytes() {
  java.lang.Object ref = producerName_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString back into the field.
    org.nd4j.shade.protobuf.ByteString b =
        org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    producerName_ = b;
    return b;
  } else {
    return (org.nd4j.shade.protobuf.ByteString) ref;
  }
}
public static final int PRODUCER_VERSION_FIELD_NUMBER = 3;
// String-or-ByteString holder with lazy decode caching (see producerName_).
private volatile java.lang.Object producerVersion_;
/**
 *
 * The version of the framework or tool used to generate this model.
 * This field SHOULD be present to indicate which implementation/tool/framework
 * emitted the model.
 *
 *
 * string producer_version = 3;
 * @return The producerVersion.
 */
@java.lang.Override
public java.lang.String getProducerVersion() {
  java.lang.Object ref = producerVersion_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the UTF-8 bytes once and cache the String back into the field.
    org.nd4j.shade.protobuf.ByteString bs =
        (org.nd4j.shade.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    producerVersion_ = s;
    return s;
  }
}
/**
 *
 * The version of the framework or tool used to generate this model.
 * This field SHOULD be present to indicate which implementation/tool/framework
 * emitted the model.
 *
 *
 * string producer_version = 3;
 * @return The bytes for producerVersion.
 */
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString
    getProducerVersionBytes() {
  java.lang.Object ref = producerVersion_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString back into the field.
    org.nd4j.shade.protobuf.ByteString b =
        org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    producerVersion_ = b;
    return b;
  } else {
    return (org.nd4j.shade.protobuf.ByteString) ref;
  }
}
public static final int DOMAIN_FIELD_NUMBER = 4;
// String-or-ByteString holder with lazy decode caching (see producerName_).
private volatile java.lang.Object domain_;
/**
 *
 * Domain name of the model.
 * We use reverse domain names as name space indicators. For example:
 * `com.facebook.fair` or `com.microsoft.cognitiveservices`
 * Together with `model_version` and GraphProto.name, this forms the unique identity of
 * the graph.
 *
 *
 * string domain = 4;
 * @return The domain.
 */
@java.lang.Override
public java.lang.String getDomain() {
  java.lang.Object ref = domain_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the UTF-8 bytes once and cache the String back into the field.
    org.nd4j.shade.protobuf.ByteString bs =
        (org.nd4j.shade.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    domain_ = s;
    return s;
  }
}
/**
 *
 * Domain name of the model.
 * We use reverse domain names as name space indicators. For example:
 * `com.facebook.fair` or `com.microsoft.cognitiveservices`
 * Together with `model_version` and GraphProto.name, this forms the unique identity of
 * the graph.
 *
 *
 * string domain = 4;
 * @return The bytes for domain.
 */
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString
    getDomainBytes() {
  java.lang.Object ref = domain_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString back into the field.
    org.nd4j.shade.protobuf.ByteString b =
        org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    domain_ = b;
    return b;
  } else {
    return (org.nd4j.shade.protobuf.ByteString) ref;
  }
}
public static final int MODEL_VERSION_FIELD_NUMBER = 5;
private long modelVersion_;
/**
 *
 * The version of the graph encoded. See Version enum below.
 *
 *
 * int64 model_version = 5;
 * @return The modelVersion.
 */
@java.lang.Override
public long getModelVersion() {
  // Plain field read; 0 means the field was absent on the wire (proto3).
  return modelVersion_;
}
public static final int DOC_STRING_FIELD_NUMBER = 6;
// String-or-ByteString holder with lazy decode caching (see producerName_).
private volatile java.lang.Object docString_;
/**
 *
 * A human-readable documentation for this model. Markdown is allowed.
 *
 *
 * string doc_string = 6;
 * @return The docString.
 */
@java.lang.Override
public java.lang.String getDocString() {
  java.lang.Object ref = docString_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the UTF-8 bytes once and cache the String back into the field.
    org.nd4j.shade.protobuf.ByteString bs =
        (org.nd4j.shade.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    docString_ = s;
    return s;
  }
}
/**
 *
 * A human-readable documentation for this model. Markdown is allowed.
 *
 *
 * string doc_string = 6;
 * @return The bytes for docString.
 */
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString
    getDocStringBytes() {
  java.lang.Object ref = docString_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString back into the field.
    org.nd4j.shade.protobuf.ByteString b =
        org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    docString_ = b;
    return b;
  } else {
    return (org.nd4j.shade.protobuf.ByteString) ref;
  }
}
public static final int GRAPH_FIELD_NUMBER = 7;
// null until the field is parsed/set; null encodes "not present".
private onnx.OnnxMl.GraphProto graph_;
/**
 *
 * The parameterized graph that is evaluated to execute the model.
 *
 *
 * .onnx.GraphProto graph = 7;
 * @return Whether the graph field is set.
 */
@java.lang.Override
public boolean hasGraph() {
  return graph_ != null;
}
/**
 *
 * The parameterized graph that is evaluated to execute the model.
 *
 *
 * .onnx.GraphProto graph = 7;
 * @return The graph.
 */
@java.lang.Override
public onnx.OnnxMl.GraphProto getGraph() {
  // Never returns null: absent field maps to the default instance.
  return graph_ == null ? onnx.OnnxMl.GraphProto.getDefaultInstance() : graph_;
}
/**
 *
 * The parameterized graph that is evaluated to execute the model.
 *
 *
 * .onnx.GraphProto graph = 7;
 */
@java.lang.Override
public onnx.OnnxMl.GraphProtoOrBuilder getGraphOrBuilder() {
  return getGraph();
}
public static final int METADATA_PROPS_FIELD_NUMBER = 14;
// Element type restored: the raw java.util.List was a generics-stripping
// artifact; protoc emits List<onnx.OnnxMl.StringStringEntryProto> here.
private java.util.List<onnx.OnnxMl.StringStringEntryProto> metadataProps_;
/**
 *
 * Named metadata values; keys should be distinct.
 *
 *
 * <code>repeated .onnx.StringStringEntryProto metadata_props = 14;</code>
 */
@java.lang.Override
public java.util.List<onnx.OnnxMl.StringStringEntryProto> getMetadataPropsList() {
  return metadataProps_;
}
/**
 * Message-or-builder view of the same list; on an immutable message the
 * elements are always built messages, so the list is returned directly.
 *
 * <code>repeated .onnx.StringStringEntryProto metadata_props = 14;</code>
 */
@java.lang.Override
public java.util.List<? extends onnx.OnnxMl.StringStringEntryProtoOrBuilder>
    getMetadataPropsOrBuilderList() {
  return metadataProps_;
}
/**
 * Number of metadata_props entries.
 *
 * <code>repeated .onnx.StringStringEntryProto metadata_props = 14;</code>
 */
@java.lang.Override
public int getMetadataPropsCount() {
  return metadataProps_.size();
}
/**
 * Returns the metadata_props entry at {@code index}.
 *
 * <code>repeated .onnx.StringStringEntryProto metadata_props = 14;</code>
 */
@java.lang.Override
public onnx.OnnxMl.StringStringEntryProto getMetadataProps(int index) {
  return metadataProps_.get(index);
}
/**
 * Returns the metadata_props entry at {@code index} as its OrBuilder view.
 *
 * <code>repeated .onnx.StringStringEntryProto metadata_props = 14;</code>
 */
@java.lang.Override
public onnx.OnnxMl.StringStringEntryProtoOrBuilder getMetadataPropsOrBuilder(
    int index) {
  return metadataProps_.get(index);
}
public static final int TRAINING_INFO_FIELD_NUMBER = 20;
// Element type restored: the raw java.util.List was a generics-stripping
// artifact; protoc emits List<onnx.OnnxMl.TrainingInfoProto> here.
private java.util.List<onnx.OnnxMl.TrainingInfoProto> trainingInfo_;
/**
 *
 * Training-specific information. Sequentially executing all stored
 * `TrainingInfoProto.algorithm`s and assigning their outputs following
 * the corresponding `TrainingInfoProto.update_binding`s is one training
 * iteration. Similarly, to initialize the model
 * (as if training hasn't happened), the user should sequentially execute
 * all stored `TrainingInfoProto.initialization`s and assigns their outputs
 * using `TrainingInfoProto.initialization_binding`s.
 * If this field is empty, the training behavior of the model is undefined.
 *
 *
 * <code>repeated .onnx.TrainingInfoProto training_info = 20;</code>
 */
@java.lang.Override
public java.util.List<onnx.OnnxMl.TrainingInfoProto> getTrainingInfoList() {
  return trainingInfo_;
}
/**
 * Message-or-builder view of the same list; on an immutable message the
 * elements are always built messages, so the list is returned directly.
 *
 * <code>repeated .onnx.TrainingInfoProto training_info = 20;</code>
 */
@java.lang.Override
public java.util.List<? extends onnx.OnnxMl.TrainingInfoProtoOrBuilder>
    getTrainingInfoOrBuilderList() {
  return trainingInfo_;
}
/**
 * Number of training_info entries.
 *
 * <code>repeated .onnx.TrainingInfoProto training_info = 20;</code>
 */
@java.lang.Override
public int getTrainingInfoCount() {
  return trainingInfo_.size();
}
/**
 * Returns the training_info entry at {@code index}.
 *
 * <code>repeated .onnx.TrainingInfoProto training_info = 20;</code>
 */
@java.lang.Override
public onnx.OnnxMl.TrainingInfoProto getTrainingInfo(int index) {
  return trainingInfo_.get(index);
}
/**
 * Returns the training_info entry at {@code index} as its OrBuilder view.
 *
 * <code>repeated .onnx.TrainingInfoProto training_info = 20;</code>
 */
@java.lang.Override
public onnx.OnnxMl.TrainingInfoProtoOrBuilder getTrainingInfoOrBuilder(
    int index) {
  return trainingInfo_.get(index);
}
public static final int FUNCTIONS_FIELD_NUMBER = 25;
private java.util.List functions_;
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority, or this is treated as an error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
@java.lang.Override
public java.util.List<onnx.OnnxMl.FunctionProto> getFunctionsList() {
// Generic return type restored (was a raw List); the backing list is
// already unmodifiable on a built message.
return functions_;
}
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority, or this is treated as an error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
@java.lang.Override
public java.util.List<? extends onnx.OnnxMl.FunctionProtoOrBuilder>
getFunctionsOrBuilderList() {
// Wildcard bound restored (was a raw List): protoc generates
// List<? extends FunctionProtoOrBuilder> for the OrBuilder view.
return functions_;
}
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority, or this is treated as an error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
@java.lang.Override
public int getFunctionsCount() {
return functions_.size();
}
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority, or this is treated as an error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
@java.lang.Override
public onnx.OnnxMl.FunctionProto getFunctions(int index) {
return functions_.get(index);
}
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority, or this is treated as an error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
@java.lang.Override
public onnx.OnnxMl.FunctionProtoOrBuilder getFunctionsOrBuilder(
int index) {
// On an immutable message the element itself serves as its OrBuilder view.
return functions_.get(index);
}
// Tri-state cache for isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// ModelProto declares no required fields, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.nd4j.shade.protobuf.CodedOutputStream output)
throws java.io.IOException {
// proto3 semantics: scalar and string fields are emitted only when they
// differ from their default value; the singular message field (graph)
// only when non-null; repeated fields element-by-element.
if (irVersion_ != 0L) {
output.writeInt64(1, irVersion_);
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(producerName_)) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 2, producerName_);
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(producerVersion_)) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 3, producerVersion_);
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(domain_)) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 4, domain_);
}
if (modelVersion_ != 0L) {
output.writeInt64(5, modelVersion_);
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(docString_)) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 6, docString_);
}
if (graph_ != null) {
output.writeMessage(7, getGraph());
}
for (int i = 0; i < opsetImport_.size(); i++) {
output.writeMessage(8, opsetImport_.get(i));
}
for (int i = 0; i < metadataProps_.size(); i++) {
output.writeMessage(14, metadataProps_.get(i));
}
for (int i = 0; i < trainingInfo_.size(); i++) {
output.writeMessage(20, trainingInfo_.get(i));
}
for (int i = 0; i < functions_.size(); i++) {
output.writeMessage(25, functions_.get(i));
}
// Round-trip any fields that were present on the wire but unknown here.
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// Size is memoized after the first computation; -1 marks "not computed".
int size = memoizedSize;
if (size != -1) return size;
size = 0;
// Mirrors writeTo(): each field contributes only when it would be written.
if (irVersion_ != 0L) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeInt64Size(1, irVersion_);
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(producerName_)) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(2, producerName_);
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(producerVersion_)) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(3, producerVersion_);
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(domain_)) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(4, domain_);
}
if (modelVersion_ != 0L) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeInt64Size(5, modelVersion_);
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(docString_)) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(6, docString_);
}
if (graph_ != null) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(7, getGraph());
}
for (int i = 0; i < opsetImport_.size(); i++) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(8, opsetImport_.get(i));
}
for (int i = 0; i < metadataProps_.size(); i++) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(14, metadataProps_.get(i));
}
for (int i = 0; i < trainingInfo_.size(); i++) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(20, trainingInfo_.get(i));
}
for (int i = 0; i < functions_.size(); i++) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(25, functions_.get(i));
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof onnx.OnnxMl.ModelProto)) {
return super.equals(obj);
}
onnx.OnnxMl.ModelProto other = (onnx.OnnxMl.ModelProto) obj;
// Field-by-field structural comparison over every declared field.
if (getIrVersion()
!= other.getIrVersion()) return false;
if (!getOpsetImportList()
.equals(other.getOpsetImportList())) return false;
if (!getProducerName()
.equals(other.getProducerName())) return false;
if (!getProducerVersion()
.equals(other.getProducerVersion())) return false;
if (!getDomain()
.equals(other.getDomain())) return false;
if (getModelVersion()
!= other.getModelVersion()) return false;
if (!getDocString()
.equals(other.getDocString())) return false;
// Optional message field: presence must match before comparing values.
if (hasGraph() != other.hasGraph()) return false;
if (hasGraph()) {
if (!getGraph()
.equals(other.getGraph())) return false;
}
if (!getMetadataPropsList()
.equals(other.getMetadataPropsList())) return false;
if (!getTrainingInfoList()
.equals(other.getTrainingInfoList())) return false;
if (!getFunctionsList()
.equals(other.getFunctionsList())) return false;
// Unknown fields participate in equality as well.
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// Memoized; 0 doubles as the "not yet computed" sentinel.
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + IR_VERSION_FIELD_NUMBER;
hash = (53 * hash) + org.nd4j.shade.protobuf.Internal.hashLong(
getIrVersion());
// Repeated fields contribute only when non-empty, so an empty list hashes
// identically to an absent one (consistent with equals()).
if (getOpsetImportCount() > 0) {
hash = (37 * hash) + OPSET_IMPORT_FIELD_NUMBER;
hash = (53 * hash) + getOpsetImportList().hashCode();
}
hash = (37 * hash) + PRODUCER_NAME_FIELD_NUMBER;
hash = (53 * hash) + getProducerName().hashCode();
hash = (37 * hash) + PRODUCER_VERSION_FIELD_NUMBER;
hash = (53 * hash) + getProducerVersion().hashCode();
hash = (37 * hash) + DOMAIN_FIELD_NUMBER;
hash = (53 * hash) + getDomain().hashCode();
hash = (37 * hash) + MODEL_VERSION_FIELD_NUMBER;
hash = (53 * hash) + org.nd4j.shade.protobuf.Internal.hashLong(
getModelVersion());
hash = (37 * hash) + DOC_STRING_FIELD_NUMBER;
hash = (53 * hash) + getDocString().hashCode();
if (hasGraph()) {
hash = (37 * hash) + GRAPH_FIELD_NUMBER;
hash = (53 * hash) + getGraph().hashCode();
}
if (getMetadataPropsCount() > 0) {
hash = (37 * hash) + METADATA_PROPS_FIELD_NUMBER;
hash = (53 * hash) + getMetadataPropsList().hashCode();
}
if (getTrainingInfoCount() > 0) {
hash = (37 * hash) + TRAINING_INFO_FIELD_NUMBER;
hash = (53 * hash) + getTrainingInfoList().hashCode();
}
if (getFunctionsCount() > 0) {
hash = (37 * hash) + FUNCTIONS_FIELD_NUMBER;
hash = (53 * hash) + getFunctionsList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom/parseDelimitedFrom overloads. Each delegates
// to the singleton PARSER, optionally with an ExtensionRegistryLite; the
// stream-based variants route protobuf exceptions through
// parseWithIOException so callers only see IOException.
public static onnx.OnnxMl.ModelProto parseFrom(
java.nio.ByteBuffer data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.ModelProto parseFrom(
java.nio.ByteBuffer data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.ModelProto parseFrom(
org.nd4j.shade.protobuf.ByteString data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.ModelProto parseFrom(
org.nd4j.shade.protobuf.ByteString data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.ModelProto parseFrom(byte[] data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.ModelProto parseFrom(
byte[] data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.ModelProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static onnx.OnnxMl.ModelProto parseFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message body.
public static onnx.OnnxMl.ModelProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static onnx.OnnxMl.ModelProto parseDelimitedFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static onnx.OnnxMl.ModelProto parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static onnx.OnnxMl.ModelProto parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods: builders are always derived from an existing
// instance (DEFAULT_INSTANCE for fresh builders) via toBuilder()/mergeFrom().
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(onnx.OnnxMl.ModelProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// Avoid a needless mergeFrom when this is the default instance.
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
* Models
* ModelProto is a top-level file/container format for bundling a ML model and
* associating its computation graph with metadata.
* The semantics of the model are described by the associated GraphProto's.
*
*
* Protobuf type {@code onnx.ModelProto}
*/
public static final class Builder extends
org.nd4j.shade.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:onnx.ModelProto)
onnx.OnnxMl.ModelProtoOrBuilder {
// Static descriptor accessor mirroring the message class's.
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return onnx.OnnxMl.internal_static_onnx_ModelProto_descriptor;
}
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
// Reflection support: maps descriptor fields onto the generated accessors.
return onnx.OnnxMl.internal_static_onnx_ModelProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
onnx.OnnxMl.ModelProto.class, onnx.OnnxMl.ModelProto.Builder.class);
}
// Construct using onnx.OnnxMl.ModelProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// When alwaysUseFieldBuilders is set (nested-builder mode), eagerly create
// the repeated-field builders so change notifications propagate to parents.
if (org.nd4j.shade.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getOpsetImportFieldBuilder();
getMetadataPropsFieldBuilder();
getTrainingInfoFieldBuilder();
getFunctionsFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
// Scalars/strings back to proto3 defaults; each repeated field either
// resets the inline list (also clearing its mutability bit in bitField0_)
// or defers to its field builder when one exists.
irVersion_ = 0L;
if (opsetImportBuilder_ == null) {
opsetImport_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
opsetImportBuilder_.clear();
}
producerName_ = "";
producerVersion_ = "";
domain_ = "";
modelVersion_ = 0L;
docString_ = "";
if (graphBuilder_ == null) {
graph_ = null;
} else {
graph_ = null;
graphBuilder_ = null;
}
if (metadataPropsBuilder_ == null) {
metadataProps_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
} else {
metadataPropsBuilder_.clear();
}
if (trainingInfoBuilder_ == null) {
trainingInfo_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
} else {
trainingInfoBuilder_.clear();
}
if (functionsBuilder_ == null) {
functions_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000008);
} else {
functionsBuilder_.clear();
}
return this;
}
@java.lang.Override
public org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return onnx.OnnxMl.internal_static_onnx_ModelProto_descriptor;
}
@java.lang.Override
public onnx.OnnxMl.ModelProto getDefaultInstanceForType() {
return onnx.OnnxMl.ModelProto.getDefaultInstance();
}
@java.lang.Override
public onnx.OnnxMl.ModelProto build() {
// build() = buildPartial() plus an initialization check; ModelProto has no
// required fields, so the check cannot fail in practice.
onnx.OnnxMl.ModelProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public onnx.OnnxMl.ModelProto buildPartial() {
onnx.OnnxMl.ModelProto result = new onnx.OnnxMl.ModelProto(this);
int from_bitField0_ = bitField0_;
result.irVersion_ = irVersion_;
// For each repeated field: if the builder owns the inline list, freeze it
// (unmodifiableList) and hand the same instance to the message — the
// cleared mutability bit forces a copy-on-next-write in this builder.
if (opsetImportBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
opsetImport_ = java.util.Collections.unmodifiableList(opsetImport_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.opsetImport_ = opsetImport_;
} else {
result.opsetImport_ = opsetImportBuilder_.build();
}
result.producerName_ = producerName_;
result.producerVersion_ = producerVersion_;
result.domain_ = domain_;
result.modelVersion_ = modelVersion_;
result.docString_ = docString_;
if (graphBuilder_ == null) {
result.graph_ = graph_;
} else {
result.graph_ = graphBuilder_.build();
}
if (metadataPropsBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)) {
metadataProps_ = java.util.Collections.unmodifiableList(metadataProps_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.metadataProps_ = metadataProps_;
} else {
result.metadataProps_ = metadataPropsBuilder_.build();
}
if (trainingInfoBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)) {
trainingInfo_ = java.util.Collections.unmodifiableList(trainingInfo_);
bitField0_ = (bitField0_ & ~0x00000004);
}
result.trainingInfo_ = trainingInfo_;
} else {
result.trainingInfo_ = trainingInfoBuilder_.build();
}
if (functionsBuilder_ == null) {
if (((bitField0_ & 0x00000008) != 0)) {
functions_ = java.util.Collections.unmodifiableList(functions_);
bitField0_ = (bitField0_ & ~0x00000008);
}
result.functions_ = functions_;
} else {
result.functions_ = functionsBuilder_.build();
}
onBuilt();
return result;
}
// Reflection-based mutators: these overrides exist only to narrow the
// return type to this Builder; all behavior is inherited.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.nd4j.shade.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.nd4j.shade.protobuf.Message other) {
// Fast path for the concrete type; otherwise fall back to reflective merge.
if (other instanceof onnx.OnnxMl.ModelProto) {
return mergeFrom((onnx.OnnxMl.ModelProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(onnx.OnnxMl.ModelProto other) {
// Protobuf merge semantics: non-default scalars overwrite, the message
// field (graph) merges recursively, repeated fields concatenate.
if (other == onnx.OnnxMl.ModelProto.getDefaultInstance()) return this;
if (other.getIrVersion() != 0L) {
setIrVersion(other.getIrVersion());
}
// Repeated field merge pattern (same for all four lists below): with no
// field builder, share other's immutable list when ours is empty, else
// copy-on-write and addAll; with a field builder, either adopt other's
// list (disposing the now-empty builder) or append via the builder.
if (opsetImportBuilder_ == null) {
if (!other.opsetImport_.isEmpty()) {
if (opsetImport_.isEmpty()) {
opsetImport_ = other.opsetImport_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureOpsetImportIsMutable();
opsetImport_.addAll(other.opsetImport_);
}
onChanged();
}
} else {
if (!other.opsetImport_.isEmpty()) {
if (opsetImportBuilder_.isEmpty()) {
opsetImportBuilder_.dispose();
opsetImportBuilder_ = null;
opsetImport_ = other.opsetImport_;
bitField0_ = (bitField0_ & ~0x00000001);
opsetImportBuilder_ =
org.nd4j.shade.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getOpsetImportFieldBuilder() : null;
} else {
opsetImportBuilder_.addAllMessages(other.opsetImport_);
}
}
}
if (!other.getProducerName().isEmpty()) {
producerName_ = other.producerName_;
onChanged();
}
if (!other.getProducerVersion().isEmpty()) {
producerVersion_ = other.producerVersion_;
onChanged();
}
if (!other.getDomain().isEmpty()) {
domain_ = other.domain_;
onChanged();
}
if (other.getModelVersion() != 0L) {
setModelVersion(other.getModelVersion());
}
if (!other.getDocString().isEmpty()) {
docString_ = other.docString_;
onChanged();
}
if (other.hasGraph()) {
mergeGraph(other.getGraph());
}
if (metadataPropsBuilder_ == null) {
if (!other.metadataProps_.isEmpty()) {
if (metadataProps_.isEmpty()) {
metadataProps_ = other.metadataProps_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureMetadataPropsIsMutable();
metadataProps_.addAll(other.metadataProps_);
}
onChanged();
}
} else {
if (!other.metadataProps_.isEmpty()) {
if (metadataPropsBuilder_.isEmpty()) {
metadataPropsBuilder_.dispose();
metadataPropsBuilder_ = null;
metadataProps_ = other.metadataProps_;
bitField0_ = (bitField0_ & ~0x00000002);
metadataPropsBuilder_ =
org.nd4j.shade.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getMetadataPropsFieldBuilder() : null;
} else {
metadataPropsBuilder_.addAllMessages(other.metadataProps_);
}
}
}
if (trainingInfoBuilder_ == null) {
if (!other.trainingInfo_.isEmpty()) {
if (trainingInfo_.isEmpty()) {
trainingInfo_ = other.trainingInfo_;
bitField0_ = (bitField0_ & ~0x00000004);
} else {
ensureTrainingInfoIsMutable();
trainingInfo_.addAll(other.trainingInfo_);
}
onChanged();
}
} else {
if (!other.trainingInfo_.isEmpty()) {
if (trainingInfoBuilder_.isEmpty()) {
trainingInfoBuilder_.dispose();
trainingInfoBuilder_ = null;
trainingInfo_ = other.trainingInfo_;
bitField0_ = (bitField0_ & ~0x00000004);
trainingInfoBuilder_ =
org.nd4j.shade.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getTrainingInfoFieldBuilder() : null;
} else {
trainingInfoBuilder_.addAllMessages(other.trainingInfo_);
}
}
}
if (functionsBuilder_ == null) {
if (!other.functions_.isEmpty()) {
if (functions_.isEmpty()) {
functions_ = other.functions_;
bitField0_ = (bitField0_ & ~0x00000008);
} else {
ensureFunctionsIsMutable();
functions_.addAll(other.functions_);
}
onChanged();
}
} else {
if (!other.functions_.isEmpty()) {
if (functionsBuilder_.isEmpty()) {
functionsBuilder_.dispose();
functionsBuilder_ = null;
functions_ = other.functions_;
bitField0_ = (bitField0_ & ~0x00000008);
functionsBuilder_ =
org.nd4j.shade.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getFunctionsFieldBuilder() : null;
} else {
functionsBuilder_.addAllMessages(other.functions_);
}
}
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// No required fields anywhere in ModelProto, so always true.
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
onnx.OnnxMl.ModelProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
// Keep whatever was parsed before the failure, then rethrow as IOException.
parsedMessage = (onnx.OnnxMl.ModelProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Bit i of bitField0_ records whether the i-th repeated field's inline list
// is currently mutable (owned by this builder).
private int bitField0_;
private long irVersion_ ;
/**
*
* The version of the IR this model targets. See Version enum above.
* This field MUST be present.
*
*
* int64 ir_version = 1;
* @return The irVersion.
*/
@java.lang.Override
public long getIrVersion() {
return irVersion_;
}
/**
*
* The version of the IR this model targets. See Version enum above.
* This field MUST be present.
*
*
* int64 ir_version = 1;
* @param value The irVersion to set.
* @return This builder for chaining.
*/
public Builder setIrVersion(long value) {
irVersion_ = value;
onChanged();
return this;
}
/**
*
* The version of the IR this model targets. See Version enum above.
* This field MUST be present.
*
*
* int64 ir_version = 1;
* @return This builder for chaining.
*/
public Builder clearIrVersion() {
irVersion_ = 0L;
onChanged();
return this;
}
// opset_import (proto field 8). Restores the element type parameters that
// were lost (raw java.util.List / java.util.ArrayList) so the builder's
// accessors are type-safe.
private java.util.List<onnx.OnnxMl.OperatorSetIdProto> opsetImport_ =
java.util.Collections.emptyList();
// Lazily switch the backing list to a private mutable copy; bit 0x00000001
// of bitField0_ tracks whether opsetImport_ is currently mutable.
private void ensureOpsetImportIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
opsetImport_ = new java.util.ArrayList<onnx.OnnxMl.OperatorSetIdProto>(opsetImport_);
bitField0_ |= 0x00000001;
}
}
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
onnx.OnnxMl.OperatorSetIdProto, onnx.OnnxMl.OperatorSetIdProto.Builder, onnx.OnnxMl.OperatorSetIdProtoOrBuilder> opsetImportBuilder_;
/**
*
* The OperatorSets this model relies on.
* All ModelProtos MUST have at least one entry that
* specifies which version of the ONNX OperatorSet is
* being imported.
* All nodes in the ModelProto's graph will bind against the operator
* with the same-domain/same-op_type operator with the HIGHEST version
* in the referenced operator sets.
*
*
* repeated .onnx.OperatorSetIdProto opset_import = 8;
*/
public java.util.List<onnx.OnnxMl.OperatorSetIdProto> getOpsetImportList() {
// Generic return type restored (was a raw List). An unmodifiable view is
// returned so callers cannot bypass the copy-on-write bookkeeping.
if (opsetImportBuilder_ == null) {
return java.util.Collections.unmodifiableList(opsetImport_);
} else {
return opsetImportBuilder_.getMessageList();
}
}
/**
*
* The OperatorSets this model relies on.
* All ModelProtos MUST have at least one entry that
* specifies which version of the ONNX OperatorSet is
* being imported.
* All nodes in the ModelProto's graph will bind against the operator
* with the same-domain/same-op_type operator with the HIGHEST version
* in the referenced operator sets.
*
*
* repeated .onnx.OperatorSetIdProto opset_import = 8;
*/
public int getOpsetImportCount() {
// Delegate to the field builder once one has been created.
if (opsetImportBuilder_ == null) {
return opsetImport_.size();
} else {
return opsetImportBuilder_.getCount();
}
}
/**
*
* The OperatorSets this model relies on.
* All ModelProtos MUST have at least one entry that
* specifies which version of the ONNX OperatorSet is
* being imported.
* All nodes in the ModelProto's graph will bind against the operator
* with the same-domain/same-op_type operator with the HIGHEST version
* in the referenced operator sets.
*
*
* repeated .onnx.OperatorSetIdProto opset_import = 8;
*/
public onnx.OnnxMl.OperatorSetIdProto getOpsetImport(int index) {
// Delegate to the field builder once one has been created.
if (opsetImportBuilder_ == null) {
return opsetImport_.get(index);
} else {
return opsetImportBuilder_.getMessage(index);
}
}
/**
*
* The OperatorSets this model relies on.
* All ModelProtos MUST have at least one entry that
* specifies which version of the ONNX OperatorSet is
* being imported.
* All nodes in the ModelProto's graph will bind against the operator
* with the same-domain/same-op_type operator with the HIGHEST version
* in the referenced operator sets.
*
*
* repeated .onnx.OperatorSetIdProto opset_import = 8;
*/
public Builder setOpsetImport(
int index, onnx.OnnxMl.OperatorSetIdProto value) {
if (opsetImportBuilder_ == null) {
// Message fields are null-hostile: reject rather than store null.
if (value == null) {
throw new NullPointerException();
}
ensureOpsetImportIsMutable();
opsetImport_.set(index, value);
onChanged();
} else {
opsetImportBuilder_.setMessage(index, value);
}
return this;
}
/**
*
* The OperatorSets this model relies on.
* All ModelProtos MUST have at least one entry that
* specifies which version of the ONNX OperatorSet is
* being imported.
* All nodes in the ModelProto's graph will bind against the operator
* with the same-domain/same-op_type operator with the HIGHEST version
* in the referenced operator sets.
*
*
* repeated .onnx.OperatorSetIdProto opset_import = 8;
*/
public Builder setOpsetImport(
int index, onnx.OnnxMl.OperatorSetIdProto.Builder builderForValue) {
// Builder overload: the value is built immediately and stored as a message.
if (opsetImportBuilder_ == null) {
ensureOpsetImportIsMutable();
opsetImport_.set(index, builderForValue.build());
onChanged();
} else {
opsetImportBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
* The OperatorSets this model relies on.
* All ModelProtos MUST have at least one entry that
* specifies which version of the ONNX OperatorSet is
* being imported.
* All nodes in the ModelProto's graph will bind against the operator
* with the same-domain/same-op_type operator with the HIGHEST version
* in the referenced operator sets.
*
*
* repeated .onnx.OperatorSetIdProto opset_import = 8;
*/
public Builder addOpsetImport(onnx.OnnxMl.OperatorSetIdProto value) {
if (opsetImportBuilder_ == null) {
// Message fields are null-hostile: reject rather than store null.
if (value == null) {
throw new NullPointerException();
}
ensureOpsetImportIsMutable();
opsetImport_.add(value);
onChanged();
} else {
opsetImportBuilder_.addMessage(value);
}
return this;
}
/**
*
* The OperatorSets this model relies on.
* All ModelProtos MUST have at least one entry that
* specifies which version of the ONNX OperatorSet is
* being imported.
* All nodes in the ModelProto's graph will bind against the operator
* with the same-domain/same-op_type operator with the HIGHEST version
* in the referenced operator sets.
*
*
* repeated .onnx.OperatorSetIdProto opset_import = 8;
*/
public Builder addOpsetImport(
int index, onnx.OnnxMl.OperatorSetIdProto value) {
// Positional insert; later elements shift right.
if (opsetImportBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOpsetImportIsMutable();
opsetImport_.add(index, value);
onChanged();
} else {
opsetImportBuilder_.addMessage(index, value);
}
return this;
}
/**
*
* The OperatorSets this model relies on.
* All ModelProtos MUST have at least one entry that
* specifies which version of the ONNX OperatorSet is
* being imported.
* All nodes in the ModelProto's graph will bind against the operator
* with the same-domain/same-op_type operator with the HIGHEST version
* in the referenced operator sets.
*
*
* repeated .onnx.OperatorSetIdProto opset_import = 8;
*/
public Builder addOpsetImport(
onnx.OnnxMl.OperatorSetIdProto.Builder builderForValue) {
// Builder overload: the value is built immediately and stored as a message.
if (opsetImportBuilder_ == null) {
ensureOpsetImportIsMutable();
opsetImport_.add(builderForValue.build());
onChanged();
} else {
opsetImportBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
* The OperatorSets this model relies on.
* All ModelProtos MUST have at least one entry that
* specifies which version of the ONNX OperatorSet is
* being imported.
* All nodes in the ModelProto's graph will bind against the operator
* with the same-domain/same-op_type operator with the HIGHEST version
* in the referenced operator sets.
*
*
* repeated .onnx.OperatorSetIdProto opset_import = 8;
*/
public Builder addOpsetImport(
int index, onnx.OnnxMl.OperatorSetIdProto.Builder builderForValue) {
// Positional insert of a just-built value.
if (opsetImportBuilder_ == null) {
ensureOpsetImportIsMutable();
opsetImport_.add(index, builderForValue.build());
onChanged();
} else {
opsetImportBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
* The OperatorSets this model relies on.
* All ModelProtos MUST have at least one entry that
* specifies which version of the ONNX OperatorSet is
* being imported.
* All nodes in the ModelProto's graph will bind against the operator
* with the same-domain/same-op_type operator with the HIGHEST version
* in the referenced operator sets.
*
*
* repeated .onnx.OperatorSetIdProto opset_import = 8;
*/
public Builder addAllOpsetImport(
java.lang.Iterable<? extends onnx.OnnxMl.OperatorSetIdProto> values) {
// Parameter type restored (was a raw Iterable): protoc generates
// Iterable<? extends OperatorSetIdProto> here. AbstractMessageLite's
// addAll also null-checks every element.
if (opsetImportBuilder_ == null) {
ensureOpsetImportIsMutable();
org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
values, opsetImport_);
onChanged();
} else {
opsetImportBuilder_.addAllMessages(values);
}
return this;
}
/**
*
* The OperatorSets this model relies on.
* All ModelProtos MUST have at least one entry that
* specifies which version of the ONNX OperatorSet is
* being imported.
* All nodes in the ModelProto's graph will bind against the operator
* with the same-domain/same-op_type operator with the HIGHEST version
* in the referenced operator sets.
*
*
* repeated .onnx.OperatorSetIdProto opset_import = 8;
*/
public Builder clearOpsetImport() {
if (opsetImportBuilder_ == null) {
// Drop back to the shared empty list and clear the mutability bit.
opsetImport_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
opsetImportBuilder_.clear();
}
return this;
}
/**
*
* The OperatorSets this model relies on.
* All ModelProtos MUST have at least one entry that
* specifies which version of the ONNX OperatorSet is
* being imported.
* All nodes in the ModelProto's graph will bind against the operator
* with the same-domain/same-op_type operator with the HIGHEST version
* in the referenced operator sets.
*
*
* repeated .onnx.OperatorSetIdProto opset_import = 8;
*/
public Builder removeOpsetImport(int index) {
if (opsetImportBuilder_ == null) {
ensureOpsetImportIsMutable();
opsetImport_.remove(index);
onChanged();
} else {
opsetImportBuilder_.remove(index);
}
return this;
}
/**
*
* The OperatorSets this model relies on.
* All ModelProtos MUST have at least one entry that
* specifies which version of the ONNX OperatorSet is
* being imported.
* All nodes in the ModelProto's graph will bind against the operator
* with the same-domain/same-op_type operator with the HIGHEST version
* in the referenced operator sets.
*
*
* repeated .onnx.OperatorSetIdProto opset_import = 8;
*/
public onnx.OnnxMl.OperatorSetIdProto.Builder getOpsetImportBuilder(
int index) {
return getOpsetImportFieldBuilder().getBuilder(index);
}
/**
*
* The OperatorSets this model relies on.
* All ModelProtos MUST have at least one entry that
* specifies which version of the ONNX OperatorSet is
* being imported.
* All nodes in the ModelProto's graph will bind against the operator
* with the same-domain/same-op_type operator with the HIGHEST version
* in the referenced operator sets.
*
*
* repeated .onnx.OperatorSetIdProto opset_import = 8;
*/
public onnx.OnnxMl.OperatorSetIdProtoOrBuilder getOpsetImportOrBuilder(
int index) {
if (opsetImportBuilder_ == null) {
return opsetImport_.get(index); } else {
return opsetImportBuilder_.getMessageOrBuilder(index);
}
}
/**
*
* The OperatorSets this model relies on.
* All ModelProtos MUST have at least one entry that
* specifies which version of the ONNX OperatorSet is
* being imported.
* All nodes in the ModelProto's graph will bind against the operator
* with the same-domain/same-op_type operator with the HIGHEST version
* in the referenced operator sets.
*
*
* repeated .onnx.OperatorSetIdProto opset_import = 8;
*/
public java.util.List
getOpsetImportOrBuilderList() {
if (opsetImportBuilder_ != null) {
return opsetImportBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(opsetImport_);
}
}
/**
*
* The OperatorSets this model relies on.
* All ModelProtos MUST have at least one entry that
* specifies which version of the ONNX OperatorSet is
* being imported.
* All nodes in the ModelProto's graph will bind against the operator
* with the same-domain/same-op_type operator with the HIGHEST version
* in the referenced operator sets.
*
*
* repeated .onnx.OperatorSetIdProto opset_import = 8;
*/
public onnx.OnnxMl.OperatorSetIdProto.Builder addOpsetImportBuilder() {
return getOpsetImportFieldBuilder().addBuilder(
onnx.OnnxMl.OperatorSetIdProto.getDefaultInstance());
}
/**
*
* The OperatorSets this model relies on.
* All ModelProtos MUST have at least one entry that
* specifies which version of the ONNX OperatorSet is
* being imported.
* All nodes in the ModelProto's graph will bind against the operator
* with the same-domain/same-op_type operator with the HIGHEST version
* in the referenced operator sets.
*
*
* repeated .onnx.OperatorSetIdProto opset_import = 8;
*/
public onnx.OnnxMl.OperatorSetIdProto.Builder addOpsetImportBuilder(
int index) {
return getOpsetImportFieldBuilder().addBuilder(
index, onnx.OnnxMl.OperatorSetIdProto.getDefaultInstance());
}
/**
*
* The OperatorSets this model relies on.
* All ModelProtos MUST have at least one entry that
* specifies which version of the ONNX OperatorSet is
* being imported.
* All nodes in the ModelProto's graph will bind against the operator
* with the same-domain/same-op_type operator with the HIGHEST version
* in the referenced operator sets.
*
*
* repeated .onnx.OperatorSetIdProto opset_import = 8;
*/
public java.util.List
getOpsetImportBuilderList() {
return getOpsetImportFieldBuilder().getBuilderList();
}
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
onnx.OnnxMl.OperatorSetIdProto, onnx.OnnxMl.OperatorSetIdProto.Builder, onnx.OnnxMl.OperatorSetIdProtoOrBuilder>
getOpsetImportFieldBuilder() {
if (opsetImportBuilder_ == null) {
opsetImportBuilder_ = new org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
onnx.OnnxMl.OperatorSetIdProto, onnx.OnnxMl.OperatorSetIdProto.Builder, onnx.OnnxMl.OperatorSetIdProtoOrBuilder>(
opsetImport_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
opsetImport_ = null;
}
return opsetImportBuilder_;
}
// --- producer_name: string, field number 2 ---
// Stored as java.lang.Object holding either a String or a ByteString; the
// getters lazily convert and cache whichever form is requested.
private java.lang.Object producerName_ = "";
/**
*
* The name of the framework or tool used to generate this model.
* This field SHOULD be present to indicate which implementation/tool/framework
* emitted the model.
*
*
* string producer_name = 2;
* @return The producerName.
*/
public java.lang.String getProducerName() {
java.lang.Object ref = producerName_;
if (!(ref instanceof java.lang.String)) {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so later calls skip the UTF-8 conversion.
producerName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
* The name of the framework or tool used to generate this model.
* This field SHOULD be present to indicate which implementation/tool/framework
* emitted the model.
*
*
* string producer_name = 2;
* @return The bytes for producerName.
*/
public org.nd4j.shade.protobuf.ByteString
getProducerNameBytes() {
java.lang.Object ref = producerName_;
if (ref instanceof String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded ByteString for subsequent byte-level access.
producerName_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
/**
*
* The name of the framework or tool used to generate this model.
* This field SHOULD be present to indicate which implementation/tool/framework
* emitted the model.
*
*
* string producer_name = 2;
* @param value The producerName to set.
* @return This builder for chaining.
*/
public Builder setProducerName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
producerName_ = value;
onChanged();
return this;
}
/**
*
* The name of the framework or tool used to generate this model.
* This field SHOULD be present to indicate which implementation/tool/framework
* emitted the model.
*
*
* string producer_name = 2;
* @return This builder for chaining.
*/
public Builder clearProducerName() {
// Resets to the default instance's value (empty string for proto3).
producerName_ = getDefaultInstance().getProducerName();
onChanged();
return this;
}
/**
*
* The name of the framework or tool used to generate this model.
* This field SHOULD be present to indicate which implementation/tool/framework
* emitted the model.
*
*
* string producer_name = 2;
* @param value The bytes for producerName to set.
* @return This builder for chaining.
*/
public Builder setProducerNameBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Proto3 strings must be valid UTF-8; reject malformed bytes up front.
checkByteStringIsUtf8(value);
producerName_ = value;
onChanged();
return this;
}
// --- producer_version: string, field number 3 ---
// Same lazy String/ByteString caching pattern as producer_name.
private java.lang.Object producerVersion_ = "";
/**
*
* The version of the framework or tool used to generate this model.
* This field SHOULD be present to indicate which implementation/tool/framework
* emitted the model.
*
*
* string producer_version = 3;
* @return The producerVersion.
*/
public java.lang.String getProducerVersion() {
java.lang.Object ref = producerVersion_;
if (!(ref instanceof java.lang.String)) {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so later calls skip the UTF-8 conversion.
producerVersion_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
* The version of the framework or tool used to generate this model.
* This field SHOULD be present to indicate which implementation/tool/framework
* emitted the model.
*
*
* string producer_version = 3;
* @return The bytes for producerVersion.
*/
public org.nd4j.shade.protobuf.ByteString
getProducerVersionBytes() {
java.lang.Object ref = producerVersion_;
if (ref instanceof String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded ByteString for subsequent byte-level access.
producerVersion_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
/**
*
* The version of the framework or tool used to generate this model.
* This field SHOULD be present to indicate which implementation/tool/framework
* emitted the model.
*
*
* string producer_version = 3;
* @param value The producerVersion to set.
* @return This builder for chaining.
*/
public Builder setProducerVersion(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
producerVersion_ = value;
onChanged();
return this;
}
/**
*
* The version of the framework or tool used to generate this model.
* This field SHOULD be present to indicate which implementation/tool/framework
* emitted the model.
*
*
* string producer_version = 3;
* @return This builder for chaining.
*/
public Builder clearProducerVersion() {
// Resets to the default instance's value (empty string for proto3).
producerVersion_ = getDefaultInstance().getProducerVersion();
onChanged();
return this;
}
/**
*
* The version of the framework or tool used to generate this model.
* This field SHOULD be present to indicate which implementation/tool/framework
* emitted the model.
*
*
* string producer_version = 3;
* @param value The bytes for producerVersion to set.
* @return This builder for chaining.
*/
public Builder setProducerVersionBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Proto3 strings must be valid UTF-8; reject malformed bytes up front.
checkByteStringIsUtf8(value);
producerVersion_ = value;
onChanged();
return this;
}
// --- domain: string, field number 4 ---
// Same lazy String/ByteString caching pattern as producer_name.
private java.lang.Object domain_ = "";
/**
*
* Domain name of the model.
* We use reverse domain names as name space indicators. For example:
* `com.facebook.fair` or `com.microsoft.cognitiveservices`
* Together with `model_version` and GraphProto.name, this forms the unique identity of
* the graph.
*
*
* string domain = 4;
* @return The domain.
*/
public java.lang.String getDomain() {
java.lang.Object ref = domain_;
if (!(ref instanceof java.lang.String)) {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so later calls skip the UTF-8 conversion.
domain_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
* Domain name of the model.
* We use reverse domain names as name space indicators. For example:
* `com.facebook.fair` or `com.microsoft.cognitiveservices`
* Together with `model_version` and GraphProto.name, this forms the unique identity of
* the graph.
*
*
* string domain = 4;
* @return The bytes for domain.
*/
public org.nd4j.shade.protobuf.ByteString
getDomainBytes() {
java.lang.Object ref = domain_;
if (ref instanceof String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded ByteString for subsequent byte-level access.
domain_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
/**
*
* Domain name of the model.
* We use reverse domain names as name space indicators. For example:
* `com.facebook.fair` or `com.microsoft.cognitiveservices`
* Together with `model_version` and GraphProto.name, this forms the unique identity of
* the graph.
*
*
* string domain = 4;
* @param value The domain to set.
* @return This builder for chaining.
*/
public Builder setDomain(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
domain_ = value;
onChanged();
return this;
}
/**
*
* Domain name of the model.
* We use reverse domain names as name space indicators. For example:
* `com.facebook.fair` or `com.microsoft.cognitiveservices`
* Together with `model_version` and GraphProto.name, this forms the unique identity of
* the graph.
*
*
* string domain = 4;
* @return This builder for chaining.
*/
public Builder clearDomain() {
// Resets to the default instance's value (empty string for proto3).
domain_ = getDefaultInstance().getDomain();
onChanged();
return this;
}
/**
*
* Domain name of the model.
* We use reverse domain names as name space indicators. For example:
* `com.facebook.fair` or `com.microsoft.cognitiveservices`
* Together with `model_version` and GraphProto.name, this forms the unique identity of
* the graph.
*
*
* string domain = 4;
* @param value The bytes for domain to set.
* @return This builder for chaining.
*/
public Builder setDomainBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Proto3 strings must be valid UTF-8; reject malformed bytes up front.
checkByteStringIsUtf8(value);
domain_ = value;
onChanged();
return this;
}
// --- model_version: int64, field number 5 ---
private long modelVersion_ ;
/**
*
* The version of the graph encoded. See Version enum below.
*
*
* int64 model_version = 5;
* @return The modelVersion.
*/
@java.lang.Override
public long getModelVersion() {
return modelVersion_;
}
/**
*
* The version of the graph encoded. See Version enum below.
*
*
* int64 model_version = 5;
* @param value The modelVersion to set.
* @return This builder for chaining.
*/
public Builder setModelVersion(long value) {
modelVersion_ = value;
onChanged();
return this;
}
/**
*
* The version of the graph encoded. See Version enum below.
*
*
* int64 model_version = 5;
* @return This builder for chaining.
*/
public Builder clearModelVersion() {
// 0L is the proto3 default for int64 fields.
modelVersion_ = 0L;
onChanged();
return this;
}
// --- doc_string: string, field number 6 ---
// Same lazy String/ByteString caching pattern as producer_name.
private java.lang.Object docString_ = "";
/**
*
* A human-readable documentation for this model. Markdown is allowed.
*
*
* string doc_string = 6;
* @return The docString.
*/
public java.lang.String getDocString() {
java.lang.Object ref = docString_;
if (!(ref instanceof java.lang.String)) {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so later calls skip the UTF-8 conversion.
docString_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
* A human-readable documentation for this model. Markdown is allowed.
*
*
* string doc_string = 6;
* @return The bytes for docString.
*/
public org.nd4j.shade.protobuf.ByteString
getDocStringBytes() {
java.lang.Object ref = docString_;
if (ref instanceof String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded ByteString for subsequent byte-level access.
docString_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
/**
*
* A human-readable documentation for this model. Markdown is allowed.
*
*
* string doc_string = 6;
* @param value The docString to set.
* @return This builder for chaining.
*/
public Builder setDocString(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
docString_ = value;
onChanged();
return this;
}
/**
*
* A human-readable documentation for this model. Markdown is allowed.
*
*
* string doc_string = 6;
* @return This builder for chaining.
*/
public Builder clearDocString() {
// Resets to the default instance's value (empty string for proto3).
docString_ = getDefaultInstance().getDocString();
onChanged();
return this;
}
/**
*
* A human-readable documentation for this model. Markdown is allowed.
*
*
* string doc_string = 6;
* @param value The bytes for docString to set.
* @return This builder for chaining.
*/
public Builder setDocStringBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Proto3 strings must be valid UTF-8; reject malformed bytes up front.
checkByteStringIsUtf8(value);
docString_ = value;
onChanged();
return this;
}
// --- graph: singular .onnx.GraphProto message, field number 7 ---
// While graphBuilder_ is null the value lives in graph_; once the nested
// SingleFieldBuilderV3 is created it owns the field and graph_ is cleared.
private onnx.OnnxMl.GraphProto graph_;
private org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.GraphProto, onnx.OnnxMl.GraphProto.Builder, onnx.OnnxMl.GraphProtoOrBuilder> graphBuilder_;
/**
*
* The parameterized graph that is evaluated to execute the model.
*
*
* .onnx.GraphProto graph = 7;
* @return Whether the graph field is set.
*/
public boolean hasGraph() {
// Present once either a value or a nested builder exists.
return graphBuilder_ != null || graph_ != null;
}
/**
*
* The parameterized graph that is evaluated to execute the model.
*
*
* .onnx.GraphProto graph = 7;
* @return The graph.
*/
public onnx.OnnxMl.GraphProto getGraph() {
if (graphBuilder_ == null) {
// Never returns null: falls back to the default instance when unset.
return graph_ == null ? onnx.OnnxMl.GraphProto.getDefaultInstance() : graph_;
} else {
return graphBuilder_.getMessage();
}
}
/**
*
* The parameterized graph that is evaluated to execute the model.
*
*
* .onnx.GraphProto graph = 7;
*/
public Builder setGraph(onnx.OnnxMl.GraphProto value) {
if (graphBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
graph_ = value;
onChanged();
} else {
graphBuilder_.setMessage(value);
}
return this;
}
/**
*
* The parameterized graph that is evaluated to execute the model.
*
*
* .onnx.GraphProto graph = 7;
*/
public Builder setGraph(
onnx.OnnxMl.GraphProto.Builder builderForValue) {
if (graphBuilder_ == null) {
graph_ = builderForValue.build();
onChanged();
} else {
graphBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
* The parameterized graph that is evaluated to execute the model.
*
*
* .onnx.GraphProto graph = 7;
*/
public Builder mergeGraph(onnx.OnnxMl.GraphProto value) {
if (graphBuilder_ == null) {
if (graph_ != null) {
// Field-wise merge into the existing message via a fresh builder.
graph_ =
onnx.OnnxMl.GraphProto.newBuilder(graph_).mergeFrom(value).buildPartial();
} else {
graph_ = value;
}
onChanged();
} else {
graphBuilder_.mergeFrom(value);
}
return this;
}
/**
*
* The parameterized graph that is evaluated to execute the model.
*
*
* .onnx.GraphProto graph = 7;
*/
public Builder clearGraph() {
if (graphBuilder_ == null) {
graph_ = null;
onChanged();
} else {
// Dropping the nested builder reverts ownership to the (null) field.
graph_ = null;
graphBuilder_ = null;
}
return this;
}
/**
*
* The parameterized graph that is evaluated to execute the model.
*
*
* .onnx.GraphProto graph = 7;
*/
public onnx.OnnxMl.GraphProto.Builder getGraphBuilder() {
onChanged();
return getGraphFieldBuilder().getBuilder();
}
/**
*
* The parameterized graph that is evaluated to execute the model.
*
*
* .onnx.GraphProto graph = 7;
*/
public onnx.OnnxMl.GraphProtoOrBuilder getGraphOrBuilder() {
if (graphBuilder_ != null) {
return graphBuilder_.getMessageOrBuilder();
} else {
return graph_ == null ?
onnx.OnnxMl.GraphProto.getDefaultInstance() : graph_;
}
}
/**
*
* The parameterized graph that is evaluated to execute the model.
*
*
* .onnx.GraphProto graph = 7;
*/
// Lazily creates the nested builder seeded with the current message; graph_
// is cleared because the builder owns the field from this point on.
private org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.GraphProto, onnx.OnnxMl.GraphProto.Builder, onnx.OnnxMl.GraphProtoOrBuilder>
getGraphFieldBuilder() {
if (graphBuilder_ == null) {
graphBuilder_ = new org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.GraphProto, onnx.OnnxMl.GraphProto.Builder, onnx.OnnxMl.GraphProtoOrBuilder>(
getGraph(),
getParentForChildren(),
isClean());
graph_ = null;
}
return graphBuilder_;
}
// --- metadata_props: repeated .onnx.StringStringEntryProto, field number 14 ---
// Same dual-storage pattern as opset_import, guarded by the 0x00000002
// mutability bit.
// NOTE(review): restored the generic type parameters that had been stripped
// to raw types (List/ArrayList/Iterable below); type erasure is identical,
// so this is source- and binary-compatible with existing callers.
private java.util.List<onnx.OnnxMl.StringStringEntryProto> metadataProps_ =
java.util.Collections.emptyList();
// Copy-on-write: makes a private mutable copy of the list (and records that
// fact in bitField0_) before the first in-place mutation.
private void ensureMetadataPropsIsMutable() {
if (!((bitField0_ & 0x00000002) != 0)) {
metadataProps_ = new java.util.ArrayList<onnx.OnnxMl.StringStringEntryProto>(metadataProps_);
bitField0_ |= 0x00000002;
}
}
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
onnx.OnnxMl.StringStringEntryProto, onnx.OnnxMl.StringStringEntryProto.Builder, onnx.OnnxMl.StringStringEntryProtoOrBuilder> metadataPropsBuilder_;
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
public java.util.List<onnx.OnnxMl.StringStringEntryProto> getMetadataPropsList() {
if (metadataPropsBuilder_ == null) {
return java.util.Collections.unmodifiableList(metadataProps_);
} else {
return metadataPropsBuilder_.getMessageList();
}
}
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
public int getMetadataPropsCount() {
if (metadataPropsBuilder_ == null) {
return metadataProps_.size();
} else {
return metadataPropsBuilder_.getCount();
}
}
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
public onnx.OnnxMl.StringStringEntryProto getMetadataProps(int index) {
if (metadataPropsBuilder_ == null) {
return metadataProps_.get(index);
} else {
return metadataPropsBuilder_.getMessage(index);
}
}
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
public Builder setMetadataProps(
int index, onnx.OnnxMl.StringStringEntryProto value) {
if (metadataPropsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMetadataPropsIsMutable();
metadataProps_.set(index, value);
onChanged();
} else {
metadataPropsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
public Builder setMetadataProps(
int index, onnx.OnnxMl.StringStringEntryProto.Builder builderForValue) {
if (metadataPropsBuilder_ == null) {
ensureMetadataPropsIsMutable();
metadataProps_.set(index, builderForValue.build());
onChanged();
} else {
metadataPropsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
public Builder addMetadataProps(onnx.OnnxMl.StringStringEntryProto value) {
if (metadataPropsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMetadataPropsIsMutable();
metadataProps_.add(value);
onChanged();
} else {
metadataPropsBuilder_.addMessage(value);
}
return this;
}
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
public Builder addMetadataProps(
int index, onnx.OnnxMl.StringStringEntryProto value) {
if (metadataPropsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMetadataPropsIsMutable();
metadataProps_.add(index, value);
onChanged();
} else {
metadataPropsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
public Builder addMetadataProps(
onnx.OnnxMl.StringStringEntryProto.Builder builderForValue) {
if (metadataPropsBuilder_ == null) {
ensureMetadataPropsIsMutable();
metadataProps_.add(builderForValue.build());
onChanged();
} else {
metadataPropsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
public Builder addMetadataProps(
int index, onnx.OnnxMl.StringStringEntryProto.Builder builderForValue) {
if (metadataPropsBuilder_ == null) {
ensureMetadataPropsIsMutable();
metadataProps_.add(index, builderForValue.build());
onChanged();
} else {
metadataPropsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
public Builder addAllMetadataProps(
java.lang.Iterable<? extends onnx.OnnxMl.StringStringEntryProto> values) {
if (metadataPropsBuilder_ == null) {
ensureMetadataPropsIsMutable();
org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
values, metadataProps_);
onChanged();
} else {
metadataPropsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
public Builder clearMetadataProps() {
if (metadataPropsBuilder_ == null) {
metadataProps_ = java.util.Collections.emptyList();
// ~0x00000002 drops the "metadataProps_ is a private mutable copy" bit.
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
metadataPropsBuilder_.clear();
}
return this;
}
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
public Builder removeMetadataProps(int index) {
if (metadataPropsBuilder_ == null) {
ensureMetadataPropsIsMutable();
metadataProps_.remove(index);
onChanged();
} else {
metadataPropsBuilder_.remove(index);
}
return this;
}
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
public onnx.OnnxMl.StringStringEntryProto.Builder getMetadataPropsBuilder(
int index) {
return getMetadataPropsFieldBuilder().getBuilder(index);
}
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
public onnx.OnnxMl.StringStringEntryProtoOrBuilder getMetadataPropsOrBuilder(
int index) {
if (metadataPropsBuilder_ == null) {
return metadataProps_.get(index); } else {
return metadataPropsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
public java.util.List<? extends onnx.OnnxMl.StringStringEntryProtoOrBuilder>
getMetadataPropsOrBuilderList() {
if (metadataPropsBuilder_ != null) {
return metadataPropsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(metadataProps_);
}
}
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
public onnx.OnnxMl.StringStringEntryProto.Builder addMetadataPropsBuilder() {
return getMetadataPropsFieldBuilder().addBuilder(
onnx.OnnxMl.StringStringEntryProto.getDefaultInstance());
}
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
public onnx.OnnxMl.StringStringEntryProto.Builder addMetadataPropsBuilder(
int index) {
return getMetadataPropsFieldBuilder().addBuilder(
index, onnx.OnnxMl.StringStringEntryProto.getDefaultInstance());
}
/**
*
* Named metadata values; keys should be distinct.
*
*
* repeated .onnx.StringStringEntryProto metadata_props = 14;
*/
public java.util.List<onnx.OnnxMl.StringStringEntryProto.Builder>
getMetadataPropsBuilderList() {
return getMetadataPropsFieldBuilder().getBuilderList();
}
// Lazily creates the nested builder seeded with the current list; the local
// metadataProps_ reference is cleared because the builder owns the field
// from this point on.
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
onnx.OnnxMl.StringStringEntryProto, onnx.OnnxMl.StringStringEntryProto.Builder, onnx.OnnxMl.StringStringEntryProtoOrBuilder>
getMetadataPropsFieldBuilder() {
if (metadataPropsBuilder_ == null) {
metadataPropsBuilder_ = new org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
onnx.OnnxMl.StringStringEntryProto, onnx.OnnxMl.StringStringEntryProto.Builder, onnx.OnnxMl.StringStringEntryProtoOrBuilder>(
metadataProps_,
((bitField0_ & 0x00000002) != 0),
getParentForChildren(),
isClean());
metadataProps_ = null;
}
return metadataPropsBuilder_;
}
private java.util.List trainingInfo_ =
java.util.Collections.emptyList();
private void ensureTrainingInfoIsMutable() {
if (!((bitField0_ & 0x00000004) != 0)) {
trainingInfo_ = new java.util.ArrayList(trainingInfo_);
bitField0_ |= 0x00000004;
}
}
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
onnx.OnnxMl.TrainingInfoProto, onnx.OnnxMl.TrainingInfoProto.Builder, onnx.OnnxMl.TrainingInfoProtoOrBuilder> trainingInfoBuilder_;
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
public java.util.List getTrainingInfoList() {
if (trainingInfoBuilder_ == null) {
return java.util.Collections.unmodifiableList(trainingInfo_);
} else {
return trainingInfoBuilder_.getMessageList();
}
}
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
public int getTrainingInfoCount() {
if (trainingInfoBuilder_ == null) {
return trainingInfo_.size();
} else {
return trainingInfoBuilder_.getCount();
}
}
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
public onnx.OnnxMl.TrainingInfoProto getTrainingInfo(int index) {
if (trainingInfoBuilder_ == null) {
return trainingInfo_.get(index);
} else {
return trainingInfoBuilder_.getMessage(index);
}
}
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
public Builder setTrainingInfo(
int index, onnx.OnnxMl.TrainingInfoProto value) {
if (trainingInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTrainingInfoIsMutable();
trainingInfo_.set(index, value);
onChanged();
} else {
trainingInfoBuilder_.setMessage(index, value);
}
return this;
}
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
public Builder setTrainingInfo(
int index, onnx.OnnxMl.TrainingInfoProto.Builder builderForValue) {
if (trainingInfoBuilder_ == null) {
ensureTrainingInfoIsMutable();
trainingInfo_.set(index, builderForValue.build());
onChanged();
} else {
trainingInfoBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
public Builder addTrainingInfo(onnx.OnnxMl.TrainingInfoProto value) {
if (trainingInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTrainingInfoIsMutable();
trainingInfo_.add(value);
onChanged();
} else {
trainingInfoBuilder_.addMessage(value);
}
return this;
}
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
public Builder addTrainingInfo(
    int index, onnx.OnnxMl.TrainingInfoProto value) {
  // Positional insert; delegate to the field builder when one has been created.
  if (trainingInfoBuilder_ != null) {
    trainingInfoBuilder_.addMessage(index, value);
    return this;
  }
  if (value == null) {
    throw new NullPointerException();
  }
  ensureTrainingInfoIsMutable();
  trainingInfo_.add(index, value);
  onChanged();
  return this;
}
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
public Builder addTrainingInfo(
    onnx.OnnxMl.TrainingInfoProto.Builder builderForValue) {
  // Materialize the message once, then append to the active backing store.
  onnx.OnnxMl.TrainingInfoProto built = builderForValue.build();
  if (trainingInfoBuilder_ != null) {
    trainingInfoBuilder_.addMessage(built);
  } else {
    ensureTrainingInfoIsMutable();
    trainingInfo_.add(built);
    onChanged();
  }
  return this;
}
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
public Builder addTrainingInfo(
    int index, onnx.OnnxMl.TrainingInfoProto.Builder builderForValue) {
  // Positional insert of a freshly built message into the active backing store.
  onnx.OnnxMl.TrainingInfoProto built = builderForValue.build();
  if (trainingInfoBuilder_ != null) {
    trainingInfoBuilder_.addMessage(index, built);
  } else {
    ensureTrainingInfoIsMutable();
    trainingInfo_.add(index, built);
    onChanged();
  }
  return this;
}
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
// Appends all given messages to the repeated training_info field.
// NOTE: the generic parameter <? extends TrainingInfoProto> was lost in an
// HTML rendering of this generated file and is restored here to match the
// protoc-generated signature for repeated message fields.
public Builder addAllTrainingInfo(
    java.lang.Iterable<? extends onnx.OnnxMl.TrainingInfoProto> values) {
  if (trainingInfoBuilder_ == null) {
    ensureTrainingInfoIsMutable();
    // Bulk add with per-element null checks handled by the runtime helper.
    org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
        values, trainingInfo_);
    onChanged();
  } else {
    trainingInfoBuilder_.addAllMessages(values);
  }
  return this;
}
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
public Builder clearTrainingInfo() {
  // Clear the field in whichever store is active.
  if (trainingInfoBuilder_ != null) {
    trainingInfoBuilder_.clear();
  } else {
    // Drop back to the shared immutable empty list and clear the
    // "local list is mutable" bit (0x4) for this field.
    trainingInfo_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000004);
    onChanged();
  }
  return this;
}
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
public Builder removeTrainingInfo(int index) {
  // Remove the element at the given position from the active backing store.
  if (trainingInfoBuilder_ != null) {
    trainingInfoBuilder_.remove(index);
  } else {
    ensureTrainingInfoIsMutable();
    trainingInfo_.remove(index);
    onChanged();
  }
  return this;
}
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
// Returns a live builder for the element at {@code index}; forces creation of
// the repeated-field builder, migrating the field to builder-backed storage.
public onnx.OnnxMl.TrainingInfoProto.Builder getTrainingInfoBuilder(
int index) {
return getTrainingInfoFieldBuilder().getBuilder(index);
}
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
public onnx.OnnxMl.TrainingInfoProtoOrBuilder getTrainingInfoOrBuilder(
    int index) {
  // Before the field builder exists, the message itself serves as the
  // read-only OrBuilder view.
  return trainingInfoBuilder_ == null
      ? trainingInfo_.get(index)
      : trainingInfoBuilder_.getMessageOrBuilder(index);
}
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
// Read-only view over the repeated training_info field.
// NOTE: the generic parameter <? extends TrainingInfoProtoOrBuilder> was lost
// in an HTML rendering of this generated file and is restored here to match
// the protoc-generated signature.
public java.util.List<? extends onnx.OnnxMl.TrainingInfoProtoOrBuilder>
    getTrainingInfoOrBuilderList() {
  if (trainingInfoBuilder_ != null) {
    return trainingInfoBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(trainingInfo_);
  }
}
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
// Appends a new default-initialized element and returns its live builder;
// forces the field into builder-backed storage.
public onnx.OnnxMl.TrainingInfoProto.Builder addTrainingInfoBuilder() {
return getTrainingInfoFieldBuilder().addBuilder(
onnx.OnnxMl.TrainingInfoProto.getDefaultInstance());
}
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
// Inserts a new default-initialized element at {@code index} and returns its
// live builder; forces the field into builder-backed storage.
public onnx.OnnxMl.TrainingInfoProto.Builder addTrainingInfoBuilder(
int index) {
return getTrainingInfoFieldBuilder().addBuilder(
index, onnx.OnnxMl.TrainingInfoProto.getDefaultInstance());
}
/**
*
* Training-specific information. Sequentially executing all stored
* `TrainingInfoProto.algorithm`s and assigning their outputs following
* the corresponding `TrainingInfoProto.update_binding`s is one training
* iteration. Similarly, to initialize the model
* (as if training hasn't happened), the user should sequentially execute
* all stored `TrainingInfoProto.initialization`s and assigns their outputs
* using `TrainingInfoProto.initialization_binding`s.
* If this field is empty, the training behavior of the model is undefined.
*
*
* repeated .onnx.TrainingInfoProto training_info = 20;
*/
// Returns live builders for every element of the repeated training_info field.
// NOTE: the generic parameter <TrainingInfoProto.Builder> was lost in an HTML
// rendering of this generated file and is restored here.
public java.util.List<onnx.OnnxMl.TrainingInfoProto.Builder>
    getTrainingInfoBuilderList() {
  return getTrainingInfoFieldBuilder().getBuilderList();
}
// Lazily creates the repeated-field builder; after creation the local list is
// released and the builder owns the field's data.
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
    onnx.OnnxMl.TrainingInfoProto, onnx.OnnxMl.TrainingInfoProto.Builder, onnx.OnnxMl.TrainingInfoProtoOrBuilder>
    getTrainingInfoFieldBuilder() {
  if (trainingInfoBuilder_ != null) {
    return trainingInfoBuilder_;
  }
  trainingInfoBuilder_ = new org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
      onnx.OnnxMl.TrainingInfoProto, onnx.OnnxMl.TrainingInfoProto.Builder, onnx.OnnxMl.TrainingInfoProtoOrBuilder>(
          trainingInfo_,
          ((bitField0_ & 0x00000004) != 0),  // whether the local list was mutable
          getParentForChildren(),
          isClean());
  trainingInfo_ = null;  // ownership transferred to the builder
  return trainingInfoBuilder_;
}
// Backing storage for the repeated `functions` field (field number 25).
// NOTE: the generic parameters <onnx.OnnxMl.FunctionProto> were lost in an
// HTML rendering of this generated file and are restored here (the raw types
// compiled only with unchecked warnings).
private java.util.List<onnx.OnnxMl.FunctionProto> functions_ =
  java.util.Collections.emptyList();
// Copy-on-write: replace the shared empty/immutable list with a private
// ArrayList the first time a mutation is requested; bit 0x8 records that the
// local list is already mutable.
private void ensureFunctionsIsMutable() {
  if (!((bitField0_ & 0x00000008) != 0)) {
    functions_ = new java.util.ArrayList<onnx.OnnxMl.FunctionProto>(functions_);
    bitField0_ |= 0x00000008;
  }
}
// Lazily-created repeated-field builder; null until first builder-style access.
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
    onnx.OnnxMl.FunctionProto, onnx.OnnxMl.FunctionProto.Builder, onnx.OnnxMl.FunctionProtoOrBuilder> functionsBuilder_;
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
// Returns an unmodifiable view of the repeated `functions` field.
// NOTE: the generic parameter <onnx.OnnxMl.FunctionProto> was lost in an HTML
// rendering of this generated file and is restored here.
public java.util.List<onnx.OnnxMl.FunctionProto> getFunctionsList() {
  if (functionsBuilder_ == null) {
    return java.util.Collections.unmodifiableList(functions_);
  } else {
    return functionsBuilder_.getMessageList();
  }
}
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
public int getFunctionsCount() {
  // List-backed before the field builder is created, builder-backed after.
  return functionsBuilder_ == null
      ? functions_.size()
      : functionsBuilder_.getCount();
}
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
public onnx.OnnxMl.FunctionProto getFunctions(int index) {
  // Fetch the element from whichever backing store currently owns the field.
  return functionsBuilder_ == null
      ? functions_.get(index)
      : functionsBuilder_.getMessage(index);
}
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
public Builder setFunctions(
    int index, onnx.OnnxMl.FunctionProto value) {
  // When the repeated-field builder exists it owns the data; delegate and exit.
  if (functionsBuilder_ != null) {
    functionsBuilder_.setMessage(index, value);
    return this;
  }
  // List-backed path rejects null explicitly (the builder path checks internally).
  if (value == null) {
    throw new NullPointerException();
  }
  ensureFunctionsIsMutable();
  functions_.set(index, value);
  onChanged();
  return this;
}
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
public Builder setFunctions(
    int index, onnx.OnnxMl.FunctionProto.Builder builderForValue) {
  // Build the message once, then route it to the active backing store.
  onnx.OnnxMl.FunctionProto built = builderForValue.build();
  if (functionsBuilder_ != null) {
    functionsBuilder_.setMessage(index, built);
  } else {
    ensureFunctionsIsMutable();
    functions_.set(index, built);
    onChanged();
  }
  return this;
}
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
public Builder addFunctions(onnx.OnnxMl.FunctionProto value) {
  // Builder-backed storage: delegate and exit early.
  if (functionsBuilder_ != null) {
    functionsBuilder_.addMessage(value);
    return this;
  }
  if (value == null) {
    throw new NullPointerException();
  }
  ensureFunctionsIsMutable();
  functions_.add(value);
  onChanged();
  return this;
}
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
public Builder addFunctions(
    int index, onnx.OnnxMl.FunctionProto value) {
  // Positional insert; delegate to the field builder when one exists.
  if (functionsBuilder_ != null) {
    functionsBuilder_.addMessage(index, value);
    return this;
  }
  if (value == null) {
    throw new NullPointerException();
  }
  ensureFunctionsIsMutable();
  functions_.add(index, value);
  onChanged();
  return this;
}
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
public Builder addFunctions(
    onnx.OnnxMl.FunctionProto.Builder builderForValue) {
  // Materialize the message once, then append to the active backing store.
  onnx.OnnxMl.FunctionProto built = builderForValue.build();
  if (functionsBuilder_ != null) {
    functionsBuilder_.addMessage(built);
  } else {
    ensureFunctionsIsMutable();
    functions_.add(built);
    onChanged();
  }
  return this;
}
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
public Builder addFunctions(
    int index, onnx.OnnxMl.FunctionProto.Builder builderForValue) {
  // Positional insert of a freshly built message into the active backing store.
  onnx.OnnxMl.FunctionProto built = builderForValue.build();
  if (functionsBuilder_ != null) {
    functionsBuilder_.addMessage(index, built);
  } else {
    ensureFunctionsIsMutable();
    functions_.add(index, built);
    onChanged();
  }
  return this;
}
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
// Appends all given messages to the repeated `functions` field.
// NOTE: the generic parameter <? extends FunctionProto> was lost in an HTML
// rendering of this generated file and is restored here to match the
// protoc-generated signature for repeated message fields.
public Builder addAllFunctions(
    java.lang.Iterable<? extends onnx.OnnxMl.FunctionProto> values) {
  if (functionsBuilder_ == null) {
    ensureFunctionsIsMutable();
    // Bulk add with per-element null checks handled by the runtime helper.
    org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
        values, functions_);
    onChanged();
  } else {
    functionsBuilder_.addAllMessages(values);
  }
  return this;
}
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
public Builder clearFunctions() {
  // Clear the field in whichever store is active.
  if (functionsBuilder_ != null) {
    functionsBuilder_.clear();
  } else {
    // Drop back to the shared immutable empty list and clear the
    // "local list is mutable" bit (0x8) for this field.
    functions_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000008);
    onChanged();
  }
  return this;
}
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
public Builder removeFunctions(int index) {
  // Remove the element at the given position from the active backing store.
  if (functionsBuilder_ != null) {
    functionsBuilder_.remove(index);
  } else {
    ensureFunctionsIsMutable();
    functions_.remove(index);
    onChanged();
  }
  return this;
}
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
// Returns a live builder for the element at {@code index}; forces creation of
// the repeated-field builder, migrating the field to builder-backed storage.
public onnx.OnnxMl.FunctionProto.Builder getFunctionsBuilder(
int index) {
return getFunctionsFieldBuilder().getBuilder(index);
}
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
public onnx.OnnxMl.FunctionProtoOrBuilder getFunctionsOrBuilder(
    int index) {
  // Before the field builder exists, the message itself serves as the
  // read-only OrBuilder view.
  return functionsBuilder_ == null
      ? functions_.get(index)
      : functionsBuilder_.getMessageOrBuilder(index);
}
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
// Read-only view over the repeated `functions` field.
// NOTE: the generic parameter <? extends FunctionProtoOrBuilder> was lost in
// an HTML rendering of this generated file and is restored here.
public java.util.List<? extends onnx.OnnxMl.FunctionProtoOrBuilder>
    getFunctionsOrBuilderList() {
  if (functionsBuilder_ != null) {
    return functionsBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(functions_);
  }
}
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
// Appends a new default-initialized element and returns its live builder;
// forces the field into builder-backed storage.
public onnx.OnnxMl.FunctionProto.Builder addFunctionsBuilder() {
return getFunctionsFieldBuilder().addBuilder(
onnx.OnnxMl.FunctionProto.getDefaultInstance());
}
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
// Inserts a new default-initialized element at {@code index} and returns its
// live builder; forces the field into builder-backed storage.
public onnx.OnnxMl.FunctionProto.Builder addFunctionsBuilder(
int index) {
return getFunctionsFieldBuilder().addBuilder(
index, onnx.OnnxMl.FunctionProto.getDefaultInstance());
}
/**
*
* A list of function protos local to the model.
* Name of the function "FunctionProto.name" should be unique within the domain "FunctionProto.domain".
* In case of any conflicts the behavior (whether the model local functions are given higher priority,
* or standard operator sets are given higher priority or this is treated as error) is defined by
* the runtimes.
* The operator sets imported by FunctionProto should be compatible with the ones
* imported by ModelProto and other model local FunctionProtos.
* Example, if same operator set say 'A' is imported by a FunctionProto and ModelProto
* or by 2 FunctionProtos then versions for the operator set may be different but,
* the operator schema returned for op_type, domain, version combination
* for both the versions should be same for every node in the function body.
* One FunctionProto can reference other FunctionProto in the model, however, recursive reference
* is not allowed.
*
*
* repeated .onnx.FunctionProto functions = 25;
*/
public java.util.List
getFunctionsBuilderList() {
return getFunctionsFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 that manages the repeated
// "functions" field (field 25). On first use the accumulated message list
// (functions_) is handed to the builder and the plain-list representation is
// dropped; all later access goes through functionsBuilder_.
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
onnx.OnnxMl.FunctionProto, onnx.OnnxMl.FunctionProto.Builder, onnx.OnnxMl.FunctionProtoOrBuilder>
getFunctionsFieldBuilder() {
if (functionsBuilder_ == null) {
functionsBuilder_ = new org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
onnx.OnnxMl.FunctionProto, onnx.OnnxMl.FunctionProto.Builder, onnx.OnnxMl.FunctionProtoOrBuilder>(
functions_,
// Bit 0x08 of bitField0_ tracks whether functions_ is currently mutable.
((bitField0_ & 0x00000008) != 0),
getParentForChildren(),
isClean());
// The field builder owns the elements now; drop the raw list reference.
functions_ = null;
}
return functionsBuilder_;
}
// Replaces this builder's unknown-field set wholesale; pure delegation to the
// generated-message superclass.
@java.lang.Override
public final Builder setUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
// Merges the given unknown fields into this builder's existing set; pure
// delegation to the generated-message superclass.
@java.lang.Override
public final Builder mergeUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:onnx.ModelProto)
}
// @@protoc_insertion_point(class_scope:onnx.ModelProto)
// Shared all-fields-unset singleton, created eagerly at class-load time.
private static final onnx.OnnxMl.ModelProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new onnx.OnnxMl.ModelProto();
}
/** Returns the shared immutable default instance of {@code onnx.ModelProto}. */
public static onnx.OnnxMl.ModelProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
/**
 * Parser singleton for {@code onnx.ModelProto}.
 * NOTE(review): restored the {@code ModelProto} type arguments on Parser and
 * AbstractParser — they had been stripped to raw types (generics lost in the
 * HTML scrape of this generated file).
 */
private static final org.nd4j.shade.protobuf.Parser<ModelProto>
    PARSER = new org.nd4j.shade.protobuf.AbstractParser<ModelProto>() {
  @java.lang.Override
  public ModelProto parsePartialFrom(
      org.nd4j.shade.protobuf.CodedInputStream input,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    // Delegates to the parsing constructor ("partial" = required-field checks
    // are skipped; proto3 has none anyway).
    return new ModelProto(input, extensionRegistry);
  }
};
/**
 * Returns the parser for {@code onnx.ModelProto} messages.
 * NOTE(review): restored the {@code ModelProto} type argument on the return
 * type; it had been stripped to a raw {@code Parser}.
 */
public static org.nd4j.shade.protobuf.Parser<ModelProto> parser() {
  return PARSER;
}
/**
 * Returns the parser for this message type (instance-side accessor).
 * NOTE(review): restored the {@code ModelProto} type argument on the return
 * type; it had been stripped to a raw {@code Parser}.
 */
@java.lang.Override
public org.nd4j.shade.protobuf.Parser<ModelProto> getParserForType() {
  return PARSER;
}
/** Returns the shared default instance (instance-side accessor). */
@java.lang.Override
public onnx.OnnxMl.ModelProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Read-only accessor interface for onnx.StringStringEntryProto, implemented by
// both the immutable message and its Builder.
public interface StringStringEntryProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:onnx.StringStringEntryProto)
org.nd4j.shade.protobuf.MessageOrBuilder {
/**
* <code>string key = 1;</code>
* @return The key.
*/
java.lang.String getKey();
/**
* <code>string key = 1;</code>
* @return The bytes for key.
*/
org.nd4j.shade.protobuf.ByteString
getKeyBytes();
/**
* <code>string value = 2;</code>
* @return The value.
*/
java.lang.String getValue();
/**
* <code>string value = 2;</code>
* @return The bytes for value.
*/
org.nd4j.shade.protobuf.ByteString
getValueBytes();
}
/**
*
* StringStringEntryProto follows the pattern for cross-proto-version maps.
* See https://developers.google.com/protocol-buffers/docs/proto3#maps
*
*
* Protobuf type {@code onnx.StringStringEntryProto}
*/
public static final class StringStringEntryProto extends
org.nd4j.shade.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:onnx.StringStringEntryProto)
StringStringEntryProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use StringStringEntryProto.newBuilder() to construct.
// NOTE(review): restored the wildcard type argument on the Builder parameter;
// it had been stripped to a raw GeneratedMessageV3.Builder (generics lost in
// the HTML scrape of this generated file).
private StringStringEntryProto(org.nd4j.shade.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
private StringStringEntryProto() {
key_ = "";
value_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new StringStringEntryProto();
}
@java.lang.Override
public final org.nd4j.shade.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: tag 10 = field 1 "key" (string),
// tag 18 = field 2 "value" (string); anything else is preserved verbatim in
// the unknown-field set. Strings are validated as UTF-8 on read.
private StringStringEntryProto(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
org.nd4j.shade.protobuf.UnknownFieldSet.Builder unknownFields =
org.nd4j.shade.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
// Tag 0 marks end of stream / end of enclosing message.
case 0:
done = true;
break;
case 10: {
java.lang.String s = input.readStringRequireUtf8();
key_ = s;
break;
}
case 18: {
java.lang.String s = input.readStringRequireUtf8();
value_ = s;
break;
}
default: {
// Unrecognized tag: stash it, or stop if it is an end-group marker.
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (org.nd4j.shade.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.nd4j.shade.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Even on failure, seal whatever was parsed so the partial message attached
// to the exception is consistent.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return onnx.OnnxMl.internal_static_onnx_StringStringEntryProto_descriptor;
}
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return onnx.OnnxMl.internal_static_onnx_StringStringEntryProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
onnx.OnnxMl.StringStringEntryProto.class, onnx.OnnxMl.StringStringEntryProto.Builder.class);
}
public static final int KEY_FIELD_NUMBER = 1;
// Holds either a decoded java.lang.String or the raw ByteString from the wire;
// accessors lazily convert and cache, hence volatile for safe publication.
private volatile java.lang.Object key_;
/**
* <code>string key = 1;</code>
* @return The key.
*/
@java.lang.Override
public java.lang.String getKey() {
java.lang.Object ref = key_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls skip the UTF-8 decode.
key_ = s;
return s;
}
}
/**
* <code>string key = 1;</code>
* @return The bytes for key.
*/
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString
getKeyBytes() {
java.lang.Object ref = key_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded ByteString (mirror of getKey()'s String caching).
key_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
public static final int VALUE_FIELD_NUMBER = 2;
// Same String-or-ByteString lazy-decode scheme as key_ above.
private volatile java.lang.Object value_;
/**
* <code>string value = 2;</code>
* @return The value.
*/
@java.lang.Override
public java.lang.String getValue() {
java.lang.Object ref = value_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls skip the UTF-8 decode.
value_ = s;
return s;
}
}
/**
* <code>string value = 2;</code>
* @return The bytes for value.
*/
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString
getValueBytes() {
java.lang.Object ref = value_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded ByteString (mirror of getValue()'s String caching).
value_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
// Memo: -1 = not yet computed, 1 = initialized, 0 = not. A proto3 message has
// no required fields, so the answer is always true once computed.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes key (field 1) then value (field 2), skipping proto3 default
// (empty) strings per the proto3 encoding rules, then any preserved unknown
// fields.
@java.lang.Override
public void writeTo(org.nd4j.shade.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(key_)) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 1, key_);
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(value_)) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 2, value_);
}
unknownFields.writeTo(output);
}
// Computes (and memoizes in memoizedSize) the exact wire size; must mirror
// writeTo(), including the skip-when-empty rule for proto3 strings.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(key_)) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(1, key_);
}
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(value_)) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(2, value_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality over key, value, and the unknown-field set. Non-message
// arguments fall back to Object identity via super.equals.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof onnx.OnnxMl.StringStringEntryProto)) {
return super.equals(obj);
}
onnx.OnnxMl.StringStringEntryProto other = (onnx.OnnxMl.StringStringEntryProto) obj;
if (!getKey()
.equals(other.getKey())) return false;
if (!getValue()
.equals(other.getValue())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Hash over descriptor, both fields, and unknown fields; memoized in
// memoizedHashCode (0 is reserved to mean "not yet computed").
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + KEY_FIELD_NUMBER;
hash = (53 * hash) + getKey().hashCode();
hash = (37 * hash) + VALUE_FIELD_NUMBER;
hash = (53 * hash) + getValue().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static onnx.OnnxMl.StringStringEntryProto parseFrom(
java.nio.ByteBuffer data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.StringStringEntryProto parseFrom(
java.nio.ByteBuffer data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.StringStringEntryProto parseFrom(
org.nd4j.shade.protobuf.ByteString data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.StringStringEntryProto parseFrom(
org.nd4j.shade.protobuf.ByteString data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.StringStringEntryProto parseFrom(byte[] data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.StringStringEntryProto parseFrom(
byte[] data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.StringStringEntryProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static onnx.OnnxMl.StringStringEntryProto parseFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static onnx.OnnxMl.StringStringEntryProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static onnx.OnnxMl.StringStringEntryProto parseDelimitedFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static onnx.OnnxMl.StringStringEntryProto parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static onnx.OnnxMl.StringStringEntryProto parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(onnx.OnnxMl.StringStringEntryProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
* StringStringEntryProto follows the pattern for cross-proto-version maps.
* See https://developers.google.com/protocol-buffers/docs/proto3#maps
*
*
* Protobuf type {@code onnx.StringStringEntryProto}
*/
public static final class Builder extends
org.nd4j.shade.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:onnx.StringStringEntryProto)
onnx.OnnxMl.StringStringEntryProtoOrBuilder {
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return onnx.OnnxMl.internal_static_onnx_StringStringEntryProto_descriptor;
}
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return onnx.OnnxMl.internal_static_onnx_StringStringEntryProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
onnx.OnnxMl.StringStringEntryProto.class, onnx.OnnxMl.StringStringEntryProto.Builder.class);
}
// Construct using onnx.OnnxMl.StringStringEntryProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.nd4j.shade.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
key_ = "";
value_ = "";
return this;
}
@java.lang.Override
public org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return onnx.OnnxMl.internal_static_onnx_StringStringEntryProto_descriptor;
}
@java.lang.Override
public onnx.OnnxMl.StringStringEntryProto getDefaultInstanceForType() {
return onnx.OnnxMl.StringStringEntryProto.getDefaultInstance();
}
@java.lang.Override
public onnx.OnnxMl.StringStringEntryProto build() {
onnx.OnnxMl.StringStringEntryProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public onnx.OnnxMl.StringStringEntryProto buildPartial() {
onnx.OnnxMl.StringStringEntryProto result = new onnx.OnnxMl.StringStringEntryProto(this);
result.key_ = key_;
result.value_ = value_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.nd4j.shade.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.nd4j.shade.protobuf.Message other) {
if (other instanceof onnx.OnnxMl.StringStringEntryProto) {
return mergeFrom((onnx.OnnxMl.StringStringEntryProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge from another message: proto3 string fields are only
// copied when non-empty, so empty strings in |other| never clobber values
// already set on this builder.
public Builder mergeFrom(onnx.OnnxMl.StringStringEntryProto other) {
if (other == onnx.OnnxMl.StringStringEntryProto.getDefaultInstance()) return this;
if (!other.getKey().isEmpty()) {
// Copy the raw field (String or ByteString) without forcing a decode.
key_ = other.key_;
onChanged();
}
if (!other.getValue().isEmpty()) {
value_ = other.value_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses a message from the stream and merges it into this builder. On parse
// failure, whatever was successfully read before the error is still merged
// (via the finally block) before the IO exception is rethrown.
@java.lang.Override
public Builder mergeFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
onnx.OnnxMl.StringStringEntryProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (onnx.OnnxMl.StringStringEntryProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object key_ = "";
/**
* string key = 1;
* @return The key.
*/
public java.lang.String getKey() {
java.lang.Object ref = key_;
if (!(ref instanceof java.lang.String)) {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
key_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* string key = 1;
* @return The bytes for key.
*/
public org.nd4j.shade.protobuf.ByteString
getKeyBytes() {
java.lang.Object ref = key_;
if (ref instanceof String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
key_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
/**
* string key = 1;
* @param value The key to set.
* @return This builder for chaining.
*/
public Builder setKey(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
key_ = value;
onChanged();
return this;
}
/**
* string key = 1;
* @return This builder for chaining.
*/
public Builder clearKey() {
key_ = getDefaultInstance().getKey();
onChanged();
return this;
}
/**
* string key = 1;
* @param value The bytes for key to set.
* @return This builder for chaining.
*/
public Builder setKeyBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
key_ = value;
onChanged();
return this;
}
private java.lang.Object value_ = "";
/**
* string value = 2;
* @return The value.
*/
public java.lang.String getValue() {
java.lang.Object ref = value_;
if (!(ref instanceof java.lang.String)) {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
value_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* string value = 2;
* @return The bytes for value.
*/
public org.nd4j.shade.protobuf.ByteString
getValueBytes() {
java.lang.Object ref = value_;
if (ref instanceof String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
value_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
/**
* string value = 2;
* @param value The value to set.
* @return This builder for chaining.
*/
public Builder setValue(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
value_ = value;
onChanged();
return this;
}
/**
* string value = 2;
* @return This builder for chaining.
*/
public Builder clearValue() {
value_ = getDefaultInstance().getValue();
onChanged();
return this;
}
/**
* string value = 2;
* @param value The bytes for value to set.
* @return This builder for chaining.
*/
public Builder setValueBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
value_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:onnx.StringStringEntryProto)
}
// @@protoc_insertion_point(class_scope:onnx.StringStringEntryProto)
// Shared all-fields-unset singleton, created eagerly at class-load time.
private static final onnx.OnnxMl.StringStringEntryProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new onnx.OnnxMl.StringStringEntryProto();
}
/** Returns the shared immutable default instance of {@code onnx.StringStringEntryProto}. */
public static onnx.OnnxMl.StringStringEntryProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
/**
 * Parser singleton for {@code onnx.StringStringEntryProto}.
 * NOTE(review): restored the {@code StringStringEntryProto} type arguments on
 * Parser and AbstractParser — they had been stripped to raw types (generics
 * lost in the HTML scrape of this generated file).
 */
private static final org.nd4j.shade.protobuf.Parser<StringStringEntryProto>
    PARSER = new org.nd4j.shade.protobuf.AbstractParser<StringStringEntryProto>() {
  @java.lang.Override
  public StringStringEntryProto parsePartialFrom(
      org.nd4j.shade.protobuf.CodedInputStream input,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    // Delegates to the parsing constructor ("partial" = required-field checks
    // are skipped; proto3 has none anyway).
    return new StringStringEntryProto(input, extensionRegistry);
  }
};
/**
 * Returns the parser for {@code onnx.StringStringEntryProto} messages.
 * NOTE(review): restored the type argument on the return type; it had been
 * stripped to a raw {@code Parser}.
 */
public static org.nd4j.shade.protobuf.Parser<StringStringEntryProto> parser() {
  return PARSER;
}
/**
 * Returns the parser for this message type (instance-side accessor).
 * NOTE(review): restored the type argument on the return type; it had been
 * stripped to a raw {@code Parser}.
 */
@java.lang.Override
public org.nd4j.shade.protobuf.Parser<StringStringEntryProto> getParserForType() {
  return PARSER;
}
/** Returns the shared default instance (instance-side accessor). */
@java.lang.Override
public onnx.OnnxMl.StringStringEntryProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Read-only accessor interface for onnx.TensorAnnotation, implemented by both
// the immutable message and its Builder.
// NOTE(review): restored the element types on the two List-returning methods;
// they had been reduced to raw java.util.List (generics stripped by HTML
// sanitization of this generated source). Also re-escaped the <key, value>
// javadoc, which was raw angle brackets.
public interface TensorAnnotationOrBuilder extends
    // @@protoc_insertion_point(interface_extends:onnx.TensorAnnotation)
    org.nd4j.shade.protobuf.MessageOrBuilder {
  /**
   * <code>string tensor_name = 1;</code>
   * @return The tensorName.
   */
  java.lang.String getTensorName();
  /**
   * <code>string tensor_name = 1;</code>
   * @return The bytes for tensorName.
   */
  org.nd4j.shade.protobuf.ByteString
      getTensorNameBytes();
  /**
   * <pre>
   * &lt;key, value&gt; pairs to annotate tensor specified by &lt;tensor_name&gt; above.
   * The keys used in the mapping below must be pre-defined in ONNX spec.
   * For example, for 8-bit linear quantization case, 'SCALE_TENSOR', 'ZERO_POINT_TENSOR' will be pre-defined as
   * quantization parameter keys.
   * </pre>
   *
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  java.util.List<onnx.OnnxMl.StringStringEntryProto>
      getQuantParameterTensorNamesList();
  /**
   * <pre>
   * &lt;key, value&gt; pairs to annotate tensor specified by &lt;tensor_name&gt; above.
   * The keys used in the mapping below must be pre-defined in ONNX spec.
   * For example, for 8-bit linear quantization case, 'SCALE_TENSOR', 'ZERO_POINT_TENSOR' will be pre-defined as
   * quantization parameter keys.
   * </pre>
   *
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  onnx.OnnxMl.StringStringEntryProto getQuantParameterTensorNames(int index);
  /**
   * <pre>
   * &lt;key, value&gt; pairs to annotate tensor specified by &lt;tensor_name&gt; above.
   * The keys used in the mapping below must be pre-defined in ONNX spec.
   * For example, for 8-bit linear quantization case, 'SCALE_TENSOR', 'ZERO_POINT_TENSOR' will be pre-defined as
   * quantization parameter keys.
   * </pre>
   *
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  int getQuantParameterTensorNamesCount();
  /**
   * <pre>
   * &lt;key, value&gt; pairs to annotate tensor specified by &lt;tensor_name&gt; above.
   * The keys used in the mapping below must be pre-defined in ONNX spec.
   * For example, for 8-bit linear quantization case, 'SCALE_TENSOR', 'ZERO_POINT_TENSOR' will be pre-defined as
   * quantization parameter keys.
   * </pre>
   *
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  java.util.List<? extends onnx.OnnxMl.StringStringEntryProtoOrBuilder>
      getQuantParameterTensorNamesOrBuilderList();
  /**
   * <pre>
   * &lt;key, value&gt; pairs to annotate tensor specified by &lt;tensor_name&gt; above.
   * The keys used in the mapping below must be pre-defined in ONNX spec.
   * For example, for 8-bit linear quantization case, 'SCALE_TENSOR', 'ZERO_POINT_TENSOR' will be pre-defined as
   * quantization parameter keys.
   * </pre>
   *
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  onnx.OnnxMl.StringStringEntryProtoOrBuilder getQuantParameterTensorNamesOrBuilder(
      int index);
}
/**
* Protobuf type {@code onnx.TensorAnnotation}
*/
public static final class TensorAnnotation extends
org.nd4j.shade.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:onnx.TensorAnnotation)
TensorAnnotationOrBuilder {
private static final long serialVersionUID = 0L;
// Use TensorAnnotation.newBuilder() to construct.
private TensorAnnotation(org.nd4j.shade.protobuf.GeneratedMessageV3.Builder builder) {
super(builder);
}
private TensorAnnotation() {
tensorName_ = "";
quantParameterTensorNames_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new TensorAnnotation();
}
@java.lang.Override
public final org.nd4j.shade.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
/**
 * Wire-format parsing constructor: tag 10 = field 1 "tensor_name" (string),
 * tag 18 = field 2 repeated "quant_parameter_tensor_names"
 * (StringStringEntryProto). Unknown fields are preserved.
 * NOTE(review): restored the element type on the ArrayList below — it had
 * been stripped to a raw type in this scraped copy of the generated source.
 */
private TensorAnnotation(
    org.nd4j.shade.protobuf.CodedInputStream input,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  int mutable_bitField0_ = 0;
  org.nd4j.shade.protobuf.UnknownFieldSet.Builder unknownFields =
      org.nd4j.shade.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10: {
          java.lang.String s = input.readStringRequireUtf8();
          tensorName_ = s;
          break;
        }
        case 18: {
          // Swap the repeated field to a mutable list on the first element;
          // bit 0x01 records that the swap happened.
          if (!((mutable_bitField0_ & 0x00000001) != 0)) {
            quantParameterTensorNames_ = new java.util.ArrayList<onnx.OnnxMl.StringStringEntryProto>();
            mutable_bitField0_ |= 0x00000001;
          }
          quantParameterTensorNames_.add(
              input.readMessage(onnx.OnnxMl.StringStringEntryProto.parser(), extensionRegistry));
          break;
        }
        default: {
          if (!parseUnknownField(
              input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
      }
    }
  } catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (org.nd4j.shade.protobuf.UninitializedMessageException e) {
    throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new org.nd4j.shade.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    // Seal the repeated field (if populated) and the unknown fields even when
    // parsing failed, so the partial message on the exception is consistent.
    if (((mutable_bitField0_ & 0x00000001) != 0)) {
      quantParameterTensorNames_ = java.util.Collections.unmodifiableList(quantParameterTensorNames_);
    }
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return onnx.OnnxMl.internal_static_onnx_TensorAnnotation_descriptor;
}
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return onnx.OnnxMl.internal_static_onnx_TensorAnnotation_fieldAccessorTable
.ensureFieldAccessorsInitialized(
onnx.OnnxMl.TensorAnnotation.class, onnx.OnnxMl.TensorAnnotation.Builder.class);
}
public static final int TENSOR_NAME_FIELD_NUMBER = 1;
private volatile java.lang.Object tensorName_;
/**
* string tensor_name = 1;
* @return The tensorName.
*/
@java.lang.Override
public java.lang.String getTensorName() {
java.lang.Object ref = tensorName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
tensorName_ = s;
return s;
}
}
/**
* string tensor_name = 1;
* @return The bytes for tensorName.
*/
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString
getTensorNameBytes() {
java.lang.Object ref = tensorName_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
tensorName_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
public static final int QUANT_PARAMETER_TENSOR_NAMES_FIELD_NUMBER = 2;
// NOTE(review): restored the element type on this field and its getter; both
// had been reduced to raw java.util.List (generics stripped by HTML
// sanitization of this generated source).
private java.util.List<onnx.OnnxMl.StringStringEntryProto> quantParameterTensorNames_;
/**
 * <pre>
 * &lt;key, value&gt; pairs to annotate tensor specified by &lt;tensor_name&gt; above.
 * The keys used in the mapping below must be pre-defined in ONNX spec.
 * For example, for 8-bit linear quantization case, 'SCALE_TENSOR', 'ZERO_POINT_TENSOR' will be pre-defined as
 * quantization parameter keys.
 * </pre>
 *
 * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
 */
@java.lang.Override
public java.util.List<onnx.OnnxMl.StringStringEntryProto> getQuantParameterTensorNamesList() {
  return quantParameterTensorNames_;
}
/**
 * <pre>
 * &lt;key, value&gt; pairs to annotate tensor specified by &lt;tensor_name&gt; above.
 * The keys used in the mapping below must be pre-defined in ONNX spec.
 * For example, for 8-bit linear quantization case, 'SCALE_TENSOR', 'ZERO_POINT_TENSOR' will be pre-defined as
 * quantization parameter keys.
 * </pre>
 *
 * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
 */
// NOTE(review): restored the wildcard element type on the returned List; it
// had been reduced to a raw java.util.List in this scraped copy.
@java.lang.Override
public java.util.List<? extends onnx.OnnxMl.StringStringEntryProtoOrBuilder>
    getQuantParameterTensorNamesOrBuilderList() {
  return quantParameterTensorNames_;
}
/**
*
* <key, value> pairs to annotate tensor specified by <tensor_name> above.
* The keys used in the mapping below must be pre-defined in ONNX spec.
* For example, for 8-bit linear quantization case, 'SCALE_TENSOR', 'ZERO_POINT_TENSOR' will be pre-defined as
* quantization parameter keys.
*
*
* repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;
*/
@java.lang.Override
public int getQuantParameterTensorNamesCount() {
return quantParameterTensorNames_.size();
}
/**
*
* <key, value> pairs to annotate tensor specified by <tensor_name> above.
* The keys used in the mapping below must be pre-defined in ONNX spec.
* For example, for 8-bit linear quantization case, 'SCALE_TENSOR', 'ZERO_POINT_TENSOR' will be pre-defined as
* quantization parameter keys.
*
*
* repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;
*/
@java.lang.Override
public onnx.OnnxMl.StringStringEntryProto getQuantParameterTensorNames(int index) {
return quantParameterTensorNames_.get(index);
}
/**
*
* <key, value> pairs to annotate tensor specified by <tensor_name> above.
* The keys used in the mapping below must be pre-defined in ONNX spec.
* For example, for 8-bit linear quantization case, 'SCALE_TENSOR', 'ZERO_POINT_TENSOR' will be pre-defined as
* quantization parameter keys.
*
*
* repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;
*/
@java.lang.Override
public onnx.OnnxMl.StringStringEntryProtoOrBuilder getQuantParameterTensorNamesOrBuilder(
int index) {
return quantParameterTensorNames_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.nd4j.shade.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(tensorName_)) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 1, tensorName_);
}
for (int i = 0; i < quantParameterTensorNames_.size(); i++) {
output.writeMessage(2, quantParameterTensorNames_.get(i));
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(tensorName_)) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(1, tensorName_);
}
for (int i = 0; i < quantParameterTensorNames_.size(); i++) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(2, quantParameterTensorNames_.get(i));
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof onnx.OnnxMl.TensorAnnotation)) {
return super.equals(obj);
}
onnx.OnnxMl.TensorAnnotation other = (onnx.OnnxMl.TensorAnnotation) obj;
if (!getTensorName()
.equals(other.getTensorName())) return false;
if (!getQuantParameterTensorNamesList()
.equals(other.getQuantParameterTensorNamesList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;  // benign race: recomputation is deterministic
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  // Mix in each field as (field number, field value) so that identical values
  // in different fields produce different hashes.
  hash = (37 * hash) + TENSOR_NAME_FIELD_NUMBER;
  hash = (53 * hash) + getTensorName().hashCode();
  // Repeated field participates only when non-empty, keeping the hash of an
  // empty list identical to a never-set field (required by the equals contract).
  if (getQuantParameterTensorNamesCount() > 0) {
    hash = (37 * hash) + QUANT_PARAMETER_TENSOR_NAMES_FIELD_NUMBER;
    hash = (53 * hash) + getQuantParameterTensorNamesList().hashCode();
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
// ---------------------------------------------------------------------------
// Static parseFrom(...) factories. Every overload delegates to PARSER (or to
// the GeneratedMessageV3 IO helpers for stream inputs); they differ only in
// the input container (ByteBuffer, ByteString, byte[], InputStream,
// CodedInputStream) and in whether an ExtensionRegistryLite is consulted.
// ---------------------------------------------------------------------------
public static onnx.OnnxMl.TensorAnnotation parseFrom(
    java.nio.ByteBuffer data)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.TensorAnnotation parseFrom(
    java.nio.ByteBuffer data,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.TensorAnnotation parseFrom(
    org.nd4j.shade.protobuf.ByteString data)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.TensorAnnotation parseFrom(
    org.nd4j.shade.protobuf.ByteString data,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.TensorAnnotation parseFrom(byte[] data)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.TensorAnnotation parseFrom(
    byte[] data,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.TensorAnnotation parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static onnx.OnnxMl.TensorAnnotation parseFrom(
    java.io.InputStream input,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants first read a varint length prefix, then that many bytes;
// suitable for reading back messages written with writeDelimitedTo().
public static onnx.OnnxMl.TensorAnnotation parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static onnx.OnnxMl.TensorAnnotation parseDelimitedFrom(
    java.io.InputStream input,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static onnx.OnnxMl.TensorAnnotation parseFrom(
    org.nd4j.shade.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static onnx.OnnxMl.TensorAnnotation parseFrom(
    org.nd4j.shade.protobuf.CodedInputStream input,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
// Creates a fresh builder with every field at its default value.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
// Creates a builder pre-populated with the fields of {@code prototype}.
public static Builder newBuilder(onnx.OnnxMl.TensorAnnotation prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // Skip the useless mergeFrom when this is the shared all-defaults instance.
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}
// Internal hook used by parent builders to create a nested builder that
// reports its changes upward.
@java.lang.Override
protected Builder newBuilderForType(
    org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * Builder for {@code onnx.TensorAnnotation} messages.
 *
 * Protobuf type {@code onnx.TensorAnnotation}
 */
public static final class Builder extends
    // NOTE(fix): the generic parameter <Builder> was missing (raw type) — restored
    // to the canonical protoc-generated form.
    org.nd4j.shade.protobuf.GeneratedMessageV3.Builder<Builder> implements
    // @@protoc_insertion_point(builder_implements:onnx.TensorAnnotation)
    onnx.OnnxMl.TensorAnnotationOrBuilder {
  public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return onnx.OnnxMl.internal_static_onnx_TensorAnnotation_descriptor;
  }

  @java.lang.Override
  protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return onnx.OnnxMl.internal_static_onnx_TensorAnnotation_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            onnx.OnnxMl.TensorAnnotation.class, onnx.OnnxMl.TensorAnnotation.Builder.class);
  }

  // Construct using onnx.OnnxMl.TensorAnnotation.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(
      org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  private void maybeForceBuilderInitialization() {
    // alwaysUseFieldBuilders is enabled only inside protobuf runtime tests;
    // when set, nested field builders are created eagerly rather than lazily.
    if (org.nd4j.shade.protobuf.GeneratedMessageV3
        .alwaysUseFieldBuilders) {
      getQuantParameterTensorNamesFieldBuilder();
    }
  }

  /** Resets every field to its default value. */
  @java.lang.Override
  public Builder clear() {
    super.clear();
    tensorName_ = "";
    if (quantParameterTensorNamesBuilder_ == null) {
      quantParameterTensorNames_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
    } else {
      quantParameterTensorNamesBuilder_.clear();
    }
    return this;
  }

  @java.lang.Override
  public org.nd4j.shade.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return onnx.OnnxMl.internal_static_onnx_TensorAnnotation_descriptor;
  }

  @java.lang.Override
  public onnx.OnnxMl.TensorAnnotation getDefaultInstanceForType() {
    return onnx.OnnxMl.TensorAnnotation.getDefaultInstance();
  }

  /**
   * Builds the message, throwing if a required field is unset (this proto3
   * message has none, so the check always passes).
   */
  @java.lang.Override
  public onnx.OnnxMl.TensorAnnotation build() {
    onnx.OnnxMl.TensorAnnotation result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public onnx.OnnxMl.TensorAnnotation buildPartial() {
    onnx.OnnxMl.TensorAnnotation result = new onnx.OnnxMl.TensorAnnotation(this);
    result.tensorName_ = tensorName_;
    if (quantParameterTensorNamesBuilder_ == null) {
      // Freeze the backing list so the built message is immutable; a later
      // mutation of this builder copy-on-writes via ensure...IsMutable().
      if (((bitField0_ & 0x00000001) != 0)) {
        quantParameterTensorNames_ = java.util.Collections.unmodifiableList(quantParameterTensorNames_);
        bitField0_ = (bitField0_ & ~0x00000001);
      }
      result.quantParameterTensorNames_ = quantParameterTensorNames_;
    } else {
      result.quantParameterTensorNames_ = quantParameterTensorNamesBuilder_.build();
    }
    onBuilt();
    return result;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(
      org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(
      org.nd4j.shade.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
      int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(org.nd4j.shade.protobuf.Message other) {
    if (other instanceof onnx.OnnxMl.TensorAnnotation) {
      return mergeFrom((onnx.OnnxMl.TensorAnnotation)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  /**
   * Merges {@code other} into this builder: a non-empty tensor_name overwrites,
   * repeated entries are appended, unknown fields are merged.
   */
  public Builder mergeFrom(onnx.OnnxMl.TensorAnnotation other) {
    if (other == onnx.OnnxMl.TensorAnnotation.getDefaultInstance()) return this;
    if (!other.getTensorName().isEmpty()) {
      tensorName_ = other.tensorName_;
      onChanged();
    }
    if (quantParameterTensorNamesBuilder_ == null) {
      if (!other.quantParameterTensorNames_.isEmpty()) {
        if (quantParameterTensorNames_.isEmpty()) {
          // Adopt the other message's (immutable) list directly; the mutable
          // bit stays clear so any later edit triggers a copy.
          quantParameterTensorNames_ = other.quantParameterTensorNames_;
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          ensureQuantParameterTensorNamesIsMutable();
          quantParameterTensorNames_.addAll(other.quantParameterTensorNames_);
        }
        onChanged();
      }
    } else {
      if (!other.quantParameterTensorNames_.isEmpty()) {
        if (quantParameterTensorNamesBuilder_.isEmpty()) {
          // Discard the empty nested builder and adopt the list, re-creating
          // the builder only when the runtime forces eager builders.
          quantParameterTensorNamesBuilder_.dispose();
          quantParameterTensorNamesBuilder_ = null;
          quantParameterTensorNames_ = other.quantParameterTensorNames_;
          bitField0_ = (bitField0_ & ~0x00000001);
          quantParameterTensorNamesBuilder_ =
            org.nd4j.shade.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
               getQuantParameterTensorNamesFieldBuilder() : null;
        } else {
          quantParameterTensorNamesBuilder_.addAllMessages(other.quantParameterTensorNames_);
        }
      }
    }
    this.mergeUnknownFields(other.unknownFields);
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    // No required fields in this message.
    return true;
  }

  @java.lang.Override
  public Builder mergeFrom(
      org.nd4j.shade.protobuf.CodedInputStream input,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    onnx.OnnxMl.TensorAnnotation parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
      // Keep whatever was parsed before the failure so callers that swallow
      // the exception still observe a partial merge, then rethrow.
      parsedMessage = (onnx.OnnxMl.TensorAnnotation) e.getUnfinishedMessage();
      throw e.unwrapIOException();
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }

  // Bit 0x00000001 tracks whether quantParameterTensorNames_ is privately
  // owned and mutable (vs. shared/immutable).
  private int bitField0_;

  // Holds either a String or a ByteString; lazily converted on access.
  private java.lang.Object tensorName_ = "";
  /**
   * <code>string tensor_name = 1;</code>
   * @return The tensorName.
   */
  public java.lang.String getTensorName() {
    java.lang.Object ref = tensorName_;
    if (!(ref instanceof java.lang.String)) {
      org.nd4j.shade.protobuf.ByteString bs =
          (org.nd4j.shade.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      tensorName_ = s;  // cache the decoded form
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   * <code>string tensor_name = 1;</code>
   * @return The bytes for tensorName.
   */
  public org.nd4j.shade.protobuf.ByteString
      getTensorNameBytes() {
    java.lang.Object ref = tensorName_;
    if (ref instanceof String) {
      org.nd4j.shade.protobuf.ByteString b =
          org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      tensorName_ = b;  // cache the encoded form
      return b;
    } else {
      return (org.nd4j.shade.protobuf.ByteString) ref;
    }
  }
  /**
   * <code>string tensor_name = 1;</code>
   * @param value The tensorName to set.
   * @return This builder for chaining.
   */
  public Builder setTensorName(
      java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    tensorName_ = value;
    onChanged();
    return this;
  }
  /**
   * <code>string tensor_name = 1;</code>
   * @return This builder for chaining.
   */
  public Builder clearTensorName() {
    tensorName_ = getDefaultInstance().getTensorName();
    onChanged();
    return this;
  }
  /**
   * <code>string tensor_name = 1;</code>
   * @param value The bytes for tensorName to set.
   * @return This builder for chaining.
   */
  public Builder setTensorNameBytes(
      org.nd4j.shade.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    tensorName_ = value;
    onChanged();
    return this;
  }

  // NOTE(fix): element type restored — this was a raw java.util.List.
  private java.util.List<onnx.OnnxMl.StringStringEntryProto> quantParameterTensorNames_ =
    java.util.Collections.emptyList();
  // Copy-on-write guard: take a private mutable copy before the first mutation.
  private void ensureQuantParameterTensorNamesIsMutable() {
    if (!((bitField0_ & 0x00000001) != 0)) {
      quantParameterTensorNames_ =
          new java.util.ArrayList<onnx.OnnxMl.StringStringEntryProto>(quantParameterTensorNames_);
      bitField0_ |= 0x00000001;
    }
  }

  private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
      onnx.OnnxMl.StringStringEntryProto, onnx.OnnxMl.StringStringEntryProto.Builder, onnx.OnnxMl.StringStringEntryProtoOrBuilder> quantParameterTensorNamesBuilder_;

  /**
   * <key, value> pairs annotating the tensor named by tensor_name above; keys must
   * be pre-defined in the ONNX spec (e.g. 'SCALE_TENSOR', 'ZERO_POINT_TENSOR' for
   * 8-bit linear quantization).
   *
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  public java.util.List<onnx.OnnxMl.StringStringEntryProto> getQuantParameterTensorNamesList() {
    if (quantParameterTensorNamesBuilder_ == null) {
      return java.util.Collections.unmodifiableList(quantParameterTensorNames_);
    } else {
      return quantParameterTensorNamesBuilder_.getMessageList();
    }
  }
  /**
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  public int getQuantParameterTensorNamesCount() {
    if (quantParameterTensorNamesBuilder_ == null) {
      return quantParameterTensorNames_.size();
    } else {
      return quantParameterTensorNamesBuilder_.getCount();
    }
  }
  /**
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  public onnx.OnnxMl.StringStringEntryProto getQuantParameterTensorNames(int index) {
    if (quantParameterTensorNamesBuilder_ == null) {
      return quantParameterTensorNames_.get(index);
    } else {
      return quantParameterTensorNamesBuilder_.getMessage(index);
    }
  }
  /**
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  public Builder setQuantParameterTensorNames(
      int index, onnx.OnnxMl.StringStringEntryProto value) {
    if (quantParameterTensorNamesBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureQuantParameterTensorNamesIsMutable();
      quantParameterTensorNames_.set(index, value);
      onChanged();
    } else {
      quantParameterTensorNamesBuilder_.setMessage(index, value);
    }
    return this;
  }
  /**
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  public Builder setQuantParameterTensorNames(
      int index, onnx.OnnxMl.StringStringEntryProto.Builder builderForValue) {
    if (quantParameterTensorNamesBuilder_ == null) {
      ensureQuantParameterTensorNamesIsMutable();
      quantParameterTensorNames_.set(index, builderForValue.build());
      onChanged();
    } else {
      quantParameterTensorNamesBuilder_.setMessage(index, builderForValue.build());
    }
    return this;
  }
  /**
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  public Builder addQuantParameterTensorNames(onnx.OnnxMl.StringStringEntryProto value) {
    if (quantParameterTensorNamesBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureQuantParameterTensorNamesIsMutable();
      quantParameterTensorNames_.add(value);
      onChanged();
    } else {
      quantParameterTensorNamesBuilder_.addMessage(value);
    }
    return this;
  }
  /**
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  public Builder addQuantParameterTensorNames(
      int index, onnx.OnnxMl.StringStringEntryProto value) {
    if (quantParameterTensorNamesBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureQuantParameterTensorNamesIsMutable();
      quantParameterTensorNames_.add(index, value);
      onChanged();
    } else {
      quantParameterTensorNamesBuilder_.addMessage(index, value);
    }
    return this;
  }
  /**
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  public Builder addQuantParameterTensorNames(
      onnx.OnnxMl.StringStringEntryProto.Builder builderForValue) {
    if (quantParameterTensorNamesBuilder_ == null) {
      ensureQuantParameterTensorNamesIsMutable();
      quantParameterTensorNames_.add(builderForValue.build());
      onChanged();
    } else {
      quantParameterTensorNamesBuilder_.addMessage(builderForValue.build());
    }
    return this;
  }
  /**
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  public Builder addQuantParameterTensorNames(
      int index, onnx.OnnxMl.StringStringEntryProto.Builder builderForValue) {
    if (quantParameterTensorNamesBuilder_ == null) {
      ensureQuantParameterTensorNamesIsMutable();
      quantParameterTensorNames_.add(index, builderForValue.build());
      onChanged();
    } else {
      quantParameterTensorNamesBuilder_.addMessage(index, builderForValue.build());
    }
    return this;
  }
  /**
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  public Builder addAllQuantParameterTensorNames(
      java.lang.Iterable<? extends onnx.OnnxMl.StringStringEntryProto> values) {
    if (quantParameterTensorNamesBuilder_ == null) {
      ensureQuantParameterTensorNamesIsMutable();
      org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
          values, quantParameterTensorNames_);
      onChanged();
    } else {
      quantParameterTensorNamesBuilder_.addAllMessages(values);
    }
    return this;
  }
  /**
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  public Builder clearQuantParameterTensorNames() {
    if (quantParameterTensorNamesBuilder_ == null) {
      quantParameterTensorNames_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
    } else {
      quantParameterTensorNamesBuilder_.clear();
    }
    return this;
  }
  /**
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  public Builder removeQuantParameterTensorNames(int index) {
    if (quantParameterTensorNamesBuilder_ == null) {
      ensureQuantParameterTensorNamesIsMutable();
      quantParameterTensorNames_.remove(index);
      onChanged();
    } else {
      quantParameterTensorNamesBuilder_.remove(index);
    }
    return this;
  }
  /**
   * Returns a mutable builder for the entry at {@code index} (forces the
   * nested field builder into existence).
   *
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  public onnx.OnnxMl.StringStringEntryProto.Builder getQuantParameterTensorNamesBuilder(
      int index) {
    return getQuantParameterTensorNamesFieldBuilder().getBuilder(index);
  }
  /**
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  public onnx.OnnxMl.StringStringEntryProtoOrBuilder getQuantParameterTensorNamesOrBuilder(
      int index) {
    if (quantParameterTensorNamesBuilder_ == null) {
      return quantParameterTensorNames_.get(index); } else {
      return quantParameterTensorNamesBuilder_.getMessageOrBuilder(index);
    }
  }
  /**
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  public java.util.List<? extends onnx.OnnxMl.StringStringEntryProtoOrBuilder>
       getQuantParameterTensorNamesOrBuilderList() {
    if (quantParameterTensorNamesBuilder_ != null) {
      return quantParameterTensorNamesBuilder_.getMessageOrBuilderList();
    } else {
      return java.util.Collections.unmodifiableList(quantParameterTensorNames_);
    }
  }
  /**
   * Appends a new default entry and returns its builder.
   *
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  public onnx.OnnxMl.StringStringEntryProto.Builder addQuantParameterTensorNamesBuilder() {
    return getQuantParameterTensorNamesFieldBuilder().addBuilder(
        onnx.OnnxMl.StringStringEntryProto.getDefaultInstance());
  }
  /**
   * Inserts a new default entry at {@code index} and returns its builder.
   *
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  public onnx.OnnxMl.StringStringEntryProto.Builder addQuantParameterTensorNamesBuilder(
      int index) {
    return getQuantParameterTensorNamesFieldBuilder().addBuilder(
        index, onnx.OnnxMl.StringStringEntryProto.getDefaultInstance());
  }
  /**
   * <code>repeated .onnx.StringStringEntryProto quant_parameter_tensor_names = 2;</code>
   */
  public java.util.List<onnx.OnnxMl.StringStringEntryProto.Builder>
       getQuantParameterTensorNamesBuilderList() {
    return getQuantParameterTensorNamesFieldBuilder().getBuilderList();
  }
  // Lazily creates the nested repeated-field builder and hands it ownership of
  // the current list (quantParameterTensorNames_ is nulled to avoid dual state).
  private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
      onnx.OnnxMl.StringStringEntryProto, onnx.OnnxMl.StringStringEntryProto.Builder, onnx.OnnxMl.StringStringEntryProtoOrBuilder>
      getQuantParameterTensorNamesFieldBuilder() {
    if (quantParameterTensorNamesBuilder_ == null) {
      quantParameterTensorNamesBuilder_ = new org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
          onnx.OnnxMl.StringStringEntryProto, onnx.OnnxMl.StringStringEntryProto.Builder, onnx.OnnxMl.StringStringEntryProtoOrBuilder>(
              quantParameterTensorNames_,
              ((bitField0_ & 0x00000001) != 0),
              getParentForChildren(),
              isClean());
      quantParameterTensorNames_ = null;
    }
    return quantParameterTensorNamesBuilder_;
  }

  @java.lang.Override
  public final Builder setUnknownFields(
      final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:onnx.TensorAnnotation)
}
// @@protoc_insertion_point(class_scope:onnx.TensorAnnotation)
// Shared singleton with every field at its default value; returned by
// getDefaultInstance()/getDefaultInstanceForType() and used as the merge no-op.
private static final onnx.OnnxMl.TensorAnnotation DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new onnx.OnnxMl.TensorAnnotation();
}
public static onnx.OnnxMl.TensorAnnotation getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Stateless shared parser for TensorAnnotation.
// NOTE(fix): the <TensorAnnotation> type arguments were missing (raw
// Parser/AbstractParser) — restored to the canonical protoc-generated form.
private static final org.nd4j.shade.protobuf.Parser<TensorAnnotation>
    PARSER = new org.nd4j.shade.protobuf.AbstractParser<TensorAnnotation>() {
  @java.lang.Override
  public TensorAnnotation parsePartialFrom(
      org.nd4j.shade.protobuf.CodedInputStream input,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    // Delegates to the parsing constructor; "partial" = required-field
    // checks are skipped (irrelevant for proto3).
    return new TensorAnnotation(input, extensionRegistry);
  }
};
/** Returns the shared parser for this message type (raw return type fixed to {@code Parser<TensorAnnotation>}). */
public static org.nd4j.shade.protobuf.Parser<TensorAnnotation> parser() {
  return PARSER;
}
/** Instance-side accessor for the shared parser (raw return type fixed to {@code Parser<TensorAnnotation>}). */
@java.lang.Override
public org.nd4j.shade.protobuf.Parser<TensorAnnotation> getParserForType() {
  return PARSER;
}
@java.lang.Override
public onnx.OnnxMl.TensorAnnotation getDefaultInstanceForType() {
  // All instances share the same immutable default.
  return DEFAULT_INSTANCE;
}
}
public interface GraphProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:onnx.GraphProto)
org.nd4j.shade.protobuf.MessageOrBuilder {
/**
*
* The nodes in the graph, sorted topologically.
*
*
* repeated .onnx.NodeProto node = 1;
*/
java.util.List
getNodeList();
/**
*
* The nodes in the graph, sorted topologically.
*
*
* repeated .onnx.NodeProto node = 1;
*/
onnx.OnnxMl.NodeProto getNode(int index);
/**
*
* The nodes in the graph, sorted topologically.
*
*
* repeated .onnx.NodeProto node = 1;
*/
int getNodeCount();
/**
*
* The nodes in the graph, sorted topologically.
*
*
* repeated .onnx.NodeProto node = 1;
*/
java.util.List
getNodeOrBuilderList();
/**
*
* The nodes in the graph, sorted topologically.
*
*
* repeated .onnx.NodeProto node = 1;
*/
onnx.OnnxMl.NodeProtoOrBuilder getNodeOrBuilder(
int index);
/**
*
* The name of the graph.
*
*
* string name = 2;
* @return The name.
*/
java.lang.String getName();
/**
*
* The name of the graph.
*
*
* string name = 2;
* @return The bytes for name.
*/
org.nd4j.shade.protobuf.ByteString
getNameBytes();
/**
*
* A list of named tensor values, used to specify constant inputs of the graph.
* Each initializer (both TensorProto as well SparseTensorProto) MUST have a name.
* The name MUST be unique across both initializer and sparse_initializer,
* but the name MAY also appear in the input list.
*
*
* repeated .onnx.TensorProto initializer = 5;
*/
java.util.List
getInitializerList();
/**
*
* A list of named tensor values, used to specify constant inputs of the graph.
* Each initializer (both TensorProto as well SparseTensorProto) MUST have a name.
* The name MUST be unique across both initializer and sparse_initializer,
* but the name MAY also appear in the input list.
*
*
* repeated .onnx.TensorProto initializer = 5;
*/
onnx.OnnxMl.TensorProto getInitializer(int index);
/**
*
* A list of named tensor values, used to specify constant inputs of the graph.
* Each initializer (both TensorProto as well SparseTensorProto) MUST have a name.
* The name MUST be unique across both initializer and sparse_initializer,
* but the name MAY also appear in the input list.
*
*
* repeated .onnx.TensorProto initializer = 5;
*/
int getInitializerCount();
/**
*
* A list of named tensor values, used to specify constant inputs of the graph.
* Each initializer (both TensorProto as well SparseTensorProto) MUST have a name.
* The name MUST be unique across both initializer and sparse_initializer,
* but the name MAY also appear in the input list.
*
*
* repeated .onnx.TensorProto initializer = 5;
*/
java.util.List
getInitializerOrBuilderList();
/**
*
* A list of named tensor values, used to specify constant inputs of the graph.
* Each initializer (both TensorProto as well SparseTensorProto) MUST have a name.
* The name MUST be unique across both initializer and sparse_initializer,
* but the name MAY also appear in the input list.
*
*
* repeated .onnx.TensorProto initializer = 5;
*/
onnx.OnnxMl.TensorProtoOrBuilder getInitializerOrBuilder(
int index);
/**
*
* Initializers (see above) stored in sparse format.
*
*
* repeated .onnx.SparseTensorProto sparse_initializer = 15;
*/
java.util.List
getSparseInitializerList();
/**
*
* Initializers (see above) stored in sparse format.
*
*
* repeated .onnx.SparseTensorProto sparse_initializer = 15;
*/
onnx.OnnxMl.SparseTensorProto getSparseInitializer(int index);
/**
*
* Initializers (see above) stored in sparse format.
*
*
* repeated .onnx.SparseTensorProto sparse_initializer = 15;
*/
int getSparseInitializerCount();
/**
*
* Initializers (see above) stored in sparse format.
*
*
* repeated .onnx.SparseTensorProto sparse_initializer = 15;
*/
java.util.List
getSparseInitializerOrBuilderList();
/**
*
* Initializers (see above) stored in sparse format.
*
*
* repeated .onnx.SparseTensorProto sparse_initializer = 15;
*/
onnx.OnnxMl.SparseTensorProtoOrBuilder getSparseInitializerOrBuilder(
int index);
/**
*
* A human-readable documentation for this graph. Markdown is allowed.
*
*
* string doc_string = 10;
* @return The docString.
*/
java.lang.String getDocString();
/**
*
* A human-readable documentation for this graph. Markdown is allowed.
*
*
* string doc_string = 10;
* @return The bytes for docString.
*/
org.nd4j.shade.protobuf.ByteString
getDocStringBytes();
/**
*
* The inputs and outputs of the graph.
*
*
* repeated .onnx.ValueInfoProto input = 11;
*/
java.util.List
getInputList();
/**
*
* The inputs and outputs of the graph.
*
*
* repeated .onnx.ValueInfoProto input = 11;
*/
onnx.OnnxMl.ValueInfoProto getInput(int index);
/**
*
* The inputs and outputs of the graph.
*
*
* repeated .onnx.ValueInfoProto input = 11;
*/
int getInputCount();
/**
*
* The inputs and outputs of the graph.
*
*
* repeated .onnx.ValueInfoProto input = 11;
*/
java.util.List
getInputOrBuilderList();
/**
*
* The inputs and outputs of the graph.
*
*
* repeated .onnx.ValueInfoProto input = 11;
*/
onnx.OnnxMl.ValueInfoProtoOrBuilder getInputOrBuilder(
int index);
/**
* repeated .onnx.ValueInfoProto output = 12;
*/
java.util.List
getOutputList();
/**
* repeated .onnx.ValueInfoProto output = 12;
*/
onnx.OnnxMl.ValueInfoProto getOutput(int index);
/**
* repeated .onnx.ValueInfoProto output = 12;
*/
int getOutputCount();
/**
* repeated .onnx.ValueInfoProto output = 12;
*/
java.util.List
getOutputOrBuilderList();
/**
* repeated .onnx.ValueInfoProto output = 12;
*/
onnx.OnnxMl.ValueInfoProtoOrBuilder getOutputOrBuilder(
int index);
/**
 * <pre>
 * Information for the values in the graph. The ValueInfoProto.name's
 * must be distinct. It is optional for a value to appear in value_info list.
 * </pre>
 *
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
java.util.List<onnx.OnnxMl.ValueInfoProto>
    getValueInfoList();
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
onnx.OnnxMl.ValueInfoProto getValueInfo(int index);
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
int getValueInfoCount();
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
java.util.List<? extends onnx.OnnxMl.ValueInfoProtoOrBuilder>
    getValueInfoOrBuilderList();
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
onnx.OnnxMl.ValueInfoProtoOrBuilder getValueInfoOrBuilder(
    int index);
/**
 * <pre>
 * This field carries information to indicate the mapping among a tensor and its
 * quantization parameter tensors. For example:
 * For tensor 'a', it may have {'SCALE_TENSOR', 'a_scale'} and {'ZERO_POINT_TENSOR', 'a_zero_point'} annotated,
 * which means, tensor 'a_scale' and tensor 'a_zero_point' are scale and zero point of tensor 'a' in the model.
 * </pre>
 *
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 */
java.util.List<onnx.OnnxMl.TensorAnnotation>
    getQuantizationAnnotationList();
/**
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 */
onnx.OnnxMl.TensorAnnotation getQuantizationAnnotation(int index);
/**
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 */
int getQuantizationAnnotationCount();
/**
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 */
java.util.List<? extends onnx.OnnxMl.TensorAnnotationOrBuilder>
    getQuantizationAnnotationOrBuilderList();
/**
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 */
onnx.OnnxMl.TensorAnnotationOrBuilder getQuantizationAnnotationOrBuilder(
    int index);
}
/**
*
* Graphs
* A graph defines the computational logic of a model and is comprised of a parameterized
* list of nodes that form a directed acyclic graph based on their inputs and outputs.
* This is the equivalent of the "network" or "graph" in many deep learning
* frameworks.
*
*
* Protobuf type {@code onnx.GraphProto}
*/
public static final class GraphProto extends
org.nd4j.shade.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:onnx.GraphProto)
GraphProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GraphProto.newBuilder() to construct.
// Builder-driven constructor: all field values are populated by buildPartial().
private GraphProto(org.nd4j.shade.protobuf.GeneratedMessageV3.Builder builder) {
  super(builder);
}
private GraphProto() {
  // Default-initialize every field to its proto3 default: empty string for
  // the string fields, the shared immutable empty list for each repeated
  // field. Initializers are listed in field-number order.
  node_ = java.util.Collections.emptyList();                   // field 1
  name_ = "";                                                  // field 2
  initializer_ = java.util.Collections.emptyList();            // field 5
  docString_ = "";                                             // field 10
  input_ = java.util.Collections.emptyList();                  // field 11
  output_ = java.util.Collections.emptyList();                 // field 12
  valueInfo_ = java.util.Collections.emptyList();              // field 13
  quantizationAnnotation_ = java.util.Collections.emptyList(); // field 14
  sparseInitializer_ = java.util.Collections.emptyList();      // field 15
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
    UnusedPrivateParameter unused) {
  // Framework hook: returns a fresh default instance; the marker parameter
  // is intentionally ignored.
  return new GraphProto();
}
@java.lang.Override
public final org.nd4j.shade.protobuf.UnknownFieldSet
    getUnknownFields() {
  // Fields not recognized at parse time; writeTo() re-serializes them so
  // round-tripping does not lose data.
  return this.unknownFields;
}
/**
 * Parses a wire-format GraphProto from {@code input} until end of stream.
 * Repeated fields are accumulated into lazily-created ArrayLists (tracked via
 * bits in {@code mutable_bitField0_}) and frozen to unmodifiable lists in the
 * finally block; unrecognized fields are preserved in {@code unknownFields}.
 */
private GraphProto(
    org.nd4j.shade.protobuf.CodedInputStream input,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  int mutable_bitField0_ = 0;
  org.nd4j.shade.protobuf.UnknownFieldSet.Builder unknownFields =
      org.nd4j.shade.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      // Each case label is a raw protobuf tag: (field_number << 3) | wire_type.
      switch (tag) {
        case 0:
          // Tag 0 signals end of input.
          done = true;
          break;
        case 10: {
          // repeated .onnx.NodeProto node = 1
          if (!((mutable_bitField0_ & 0x00000001) != 0)) {
            node_ = new java.util.ArrayList();
            mutable_bitField0_ |= 0x00000001;
          }
          node_.add(
              input.readMessage(onnx.OnnxMl.NodeProto.parser(), extensionRegistry));
          break;
        }
        case 18: {
          // string name = 2
          java.lang.String s = input.readStringRequireUtf8();
          name_ = s;
          break;
        }
        case 42: {
          // repeated .onnx.TensorProto initializer = 5
          if (!((mutable_bitField0_ & 0x00000002) != 0)) {
            initializer_ = new java.util.ArrayList();
            mutable_bitField0_ |= 0x00000002;
          }
          initializer_.add(
              input.readMessage(onnx.OnnxMl.TensorProto.parser(), extensionRegistry));
          break;
        }
        case 82: {
          // string doc_string = 10
          java.lang.String s = input.readStringRequireUtf8();
          docString_ = s;
          break;
        }
        case 90: {
          // repeated .onnx.ValueInfoProto input = 11
          if (!((mutable_bitField0_ & 0x00000008) != 0)) {
            input_ = new java.util.ArrayList();
            mutable_bitField0_ |= 0x00000008;
          }
          input_.add(
              input.readMessage(onnx.OnnxMl.ValueInfoProto.parser(), extensionRegistry));
          break;
        }
        case 98: {
          // repeated .onnx.ValueInfoProto output = 12
          if (!((mutable_bitField0_ & 0x00000010) != 0)) {
            output_ = new java.util.ArrayList();
            mutable_bitField0_ |= 0x00000010;
          }
          output_.add(
              input.readMessage(onnx.OnnxMl.ValueInfoProto.parser(), extensionRegistry));
          break;
        }
        case 106: {
          // repeated .onnx.ValueInfoProto value_info = 13
          if (!((mutable_bitField0_ & 0x00000020) != 0)) {
            valueInfo_ = new java.util.ArrayList();
            mutable_bitField0_ |= 0x00000020;
          }
          valueInfo_.add(
              input.readMessage(onnx.OnnxMl.ValueInfoProto.parser(), extensionRegistry));
          break;
        }
        case 114: {
          // repeated .onnx.TensorAnnotation quantization_annotation = 14
          if (!((mutable_bitField0_ & 0x00000040) != 0)) {
            quantizationAnnotation_ = new java.util.ArrayList();
            mutable_bitField0_ |= 0x00000040;
          }
          quantizationAnnotation_.add(
              input.readMessage(onnx.OnnxMl.TensorAnnotation.parser(), extensionRegistry));
          break;
        }
        case 122: {
          // repeated .onnx.SparseTensorProto sparse_initializer = 15
          // NOTE: uses bit 0x00000004 — bits follow field declaration order
          // in the message, not wire/tag order.
          if (!((mutable_bitField0_ & 0x00000004) != 0)) {
            sparseInitializer_ = new java.util.ArrayList();
            mutable_bitField0_ |= 0x00000004;
          }
          sparseInitializer_.add(
              input.readMessage(onnx.OnnxMl.SparseTensorProto.parser(), extensionRegistry));
          break;
        }
        default: {
          // Unknown field: preserve it; a false return means end of message.
          if (!parseUnknownField(
              input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
      }
    }
  } catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
    // Attach the partially-parsed message so callers can inspect it.
    throw e.setUnfinishedMessage(this);
  } catch (org.nd4j.shade.protobuf.UninitializedMessageException e) {
    throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new org.nd4j.shade.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    // Freeze every list that was populated so getters can expose it directly.
    if (((mutable_bitField0_ & 0x00000001) != 0)) {
      node_ = java.util.Collections.unmodifiableList(node_);
    }
    if (((mutable_bitField0_ & 0x00000002) != 0)) {
      initializer_ = java.util.Collections.unmodifiableList(initializer_);
    }
    if (((mutable_bitField0_ & 0x00000008) != 0)) {
      input_ = java.util.Collections.unmodifiableList(input_);
    }
    if (((mutable_bitField0_ & 0x00000010) != 0)) {
      output_ = java.util.Collections.unmodifiableList(output_);
    }
    if (((mutable_bitField0_ & 0x00000020) != 0)) {
      valueInfo_ = java.util.Collections.unmodifiableList(valueInfo_);
    }
    if (((mutable_bitField0_ & 0x00000040) != 0)) {
      quantizationAnnotation_ = java.util.Collections.unmodifiableList(quantizationAnnotation_);
    }
    if (((mutable_bitField0_ & 0x00000004) != 0)) {
      sparseInitializer_ = java.util.Collections.unmodifiableList(sparseInitializer_);
    }
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Static access to the proto descriptor for onnx.GraphProto.
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return onnx.OnnxMl.internal_static_onnx_GraphProto_descriptor;
}
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  // Binds the shared accessor table to this message and its Builder class.
  return onnx.OnnxMl.internal_static_onnx_GraphProto_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          onnx.OnnxMl.GraphProto.class, onnx.OnnxMl.GraphProto.Builder.class);
}
public static final int NODE_FIELD_NUMBER = 1;
// Parameterized element type restored; the raw java.util.List here had its
// generic arguments stripped during extraction.
private java.util.List<onnx.OnnxMl.NodeProto> node_;
/**
 * <pre>
 * The nodes in the graph, sorted topologically.
 * </pre>
 *
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
@java.lang.Override
public java.util.List<onnx.OnnxMl.NodeProto> getNodeList() {
  return node_;
}
/**
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
@java.lang.Override
public java.util.List<? extends onnx.OnnxMl.NodeProtoOrBuilder>
    getNodeOrBuilderList() {
  return node_;
}
/**
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
@java.lang.Override
public int getNodeCount() {
  return node_.size();
}
/**
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
@java.lang.Override
public onnx.OnnxMl.NodeProto getNode(int index) {
  return node_.get(index);
}
/**
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
@java.lang.Override
public onnx.OnnxMl.NodeProtoOrBuilder getNodeOrBuilder(
    int index) {
  return node_.get(index);
}
public static final int NAME_FIELD_NUMBER = 2;
// Holds either a java.lang.String or a ByteString; converted lazily and
// cached by the getters below.
private volatile java.lang.Object name_;
/**
 * <pre>
 * The name of the graph.
 * </pre>
 *
 * <code>string name = 2;</code>
 * @return The name.
 */
@java.lang.Override
public java.lang.String getName() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First access after parsing: decode the UTF-8 bytes once and cache
    // the resulting String back into name_.
    org.nd4j.shade.protobuf.ByteString bs =
        (org.nd4j.shade.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    name_ = s;
    return s;
  }
}
/**
 * <pre>
 * The name of the graph.
 * </pre>
 *
 * <code>string name = 2;</code>
 * @return The bytes for name.
 */
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString
    getNameBytes() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString form back into name_.
    org.nd4j.shade.protobuf.ByteString b =
        org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    name_ = b;
    return b;
  } else {
    return (org.nd4j.shade.protobuf.ByteString) ref;
  }
}
public static final int INITIALIZER_FIELD_NUMBER = 5;
// Parameterized element type restored (raw List in the extracted source).
private java.util.List<onnx.OnnxMl.TensorProto> initializer_;
/**
 * <pre>
 * A list of named tensor values, used to specify constant inputs of the graph.
 * Each initializer (both TensorProto as well SparseTensorProto) MUST have a name.
 * The name MUST be unique across both initializer and sparse_initializer,
 * but the name MAY also appear in the input list.
 * </pre>
 *
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
@java.lang.Override
public java.util.List<onnx.OnnxMl.TensorProto> getInitializerList() {
  return initializer_;
}
/**
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
@java.lang.Override
public java.util.List<? extends onnx.OnnxMl.TensorProtoOrBuilder>
    getInitializerOrBuilderList() {
  return initializer_;
}
/**
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
@java.lang.Override
public int getInitializerCount() {
  return initializer_.size();
}
/**
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
@java.lang.Override
public onnx.OnnxMl.TensorProto getInitializer(int index) {
  return initializer_.get(index);
}
/**
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
@java.lang.Override
public onnx.OnnxMl.TensorProtoOrBuilder getInitializerOrBuilder(
    int index) {
  return initializer_.get(index);
}
public static final int SPARSE_INITIALIZER_FIELD_NUMBER = 15;
// Parameterized element type restored (raw List in the extracted source).
private java.util.List<onnx.OnnxMl.SparseTensorProto> sparseInitializer_;
/**
 * <pre>
 * Initializers (see above) stored in sparse format.
 * </pre>
 *
 * <code>repeated .onnx.SparseTensorProto sparse_initializer = 15;</code>
 */
@java.lang.Override
public java.util.List<onnx.OnnxMl.SparseTensorProto> getSparseInitializerList() {
  return sparseInitializer_;
}
/**
 * <code>repeated .onnx.SparseTensorProto sparse_initializer = 15;</code>
 */
@java.lang.Override
public java.util.List<? extends onnx.OnnxMl.SparseTensorProtoOrBuilder>
    getSparseInitializerOrBuilderList() {
  return sparseInitializer_;
}
/**
 * <code>repeated .onnx.SparseTensorProto sparse_initializer = 15;</code>
 */
@java.lang.Override
public int getSparseInitializerCount() {
  return sparseInitializer_.size();
}
/**
 * <code>repeated .onnx.SparseTensorProto sparse_initializer = 15;</code>
 */
@java.lang.Override
public onnx.OnnxMl.SparseTensorProto getSparseInitializer(int index) {
  return sparseInitializer_.get(index);
}
/**
 * <code>repeated .onnx.SparseTensorProto sparse_initializer = 15;</code>
 */
@java.lang.Override
public onnx.OnnxMl.SparseTensorProtoOrBuilder getSparseInitializerOrBuilder(
    int index) {
  return sparseInitializer_.get(index);
}
public static final int DOC_STRING_FIELD_NUMBER = 10;
// Holds either a java.lang.String or a ByteString; converted lazily and
// cached by the getters below.
private volatile java.lang.Object docString_;
/**
 * <pre>
 * A human-readable documentation for this graph. Markdown is allowed.
 * </pre>
 *
 * <code>string doc_string = 10;</code>
 * @return The docString.
 */
@java.lang.Override
public java.lang.String getDocString() {
  java.lang.Object ref = docString_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the UTF-8 bytes once and cache the String back into docString_.
    org.nd4j.shade.protobuf.ByteString bs =
        (org.nd4j.shade.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    docString_ = s;
    return s;
  }
}
/**
 * <pre>
 * A human-readable documentation for this graph. Markdown is allowed.
 * </pre>
 *
 * <code>string doc_string = 10;</code>
 * @return The bytes for docString.
 */
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString
    getDocStringBytes() {
  java.lang.Object ref = docString_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString form back into docString_.
    org.nd4j.shade.protobuf.ByteString b =
        org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    docString_ = b;
    return b;
  } else {
    return (org.nd4j.shade.protobuf.ByteString) ref;
  }
}
public static final int INPUT_FIELD_NUMBER = 11;
// Parameterized element type restored (raw List in the extracted source).
private java.util.List<onnx.OnnxMl.ValueInfoProto> input_;
/**
 * <pre>
 * The inputs and outputs of the graph.
 * </pre>
 *
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
@java.lang.Override
public java.util.List<onnx.OnnxMl.ValueInfoProto> getInputList() {
  return input_;
}
/**
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
@java.lang.Override
public java.util.List<? extends onnx.OnnxMl.ValueInfoProtoOrBuilder>
    getInputOrBuilderList() {
  return input_;
}
/**
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
@java.lang.Override
public int getInputCount() {
  return input_.size();
}
/**
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
@java.lang.Override
public onnx.OnnxMl.ValueInfoProto getInput(int index) {
  return input_.get(index);
}
/**
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
@java.lang.Override
public onnx.OnnxMl.ValueInfoProtoOrBuilder getInputOrBuilder(
    int index) {
  return input_.get(index);
}
public static final int OUTPUT_FIELD_NUMBER = 12;
// Parameterized element type restored (raw List in the extracted source).
private java.util.List<onnx.OnnxMl.ValueInfoProto> output_;
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
@java.lang.Override
public java.util.List<onnx.OnnxMl.ValueInfoProto> getOutputList() {
  return output_;
}
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
@java.lang.Override
public java.util.List<? extends onnx.OnnxMl.ValueInfoProtoOrBuilder>
    getOutputOrBuilderList() {
  return output_;
}
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
@java.lang.Override
public int getOutputCount() {
  return output_.size();
}
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
@java.lang.Override
public onnx.OnnxMl.ValueInfoProto getOutput(int index) {
  return output_.get(index);
}
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
@java.lang.Override
public onnx.OnnxMl.ValueInfoProtoOrBuilder getOutputOrBuilder(
    int index) {
  return output_.get(index);
}
public static final int VALUE_INFO_FIELD_NUMBER = 13;
// Parameterized element type restored (raw List in the extracted source).
private java.util.List<onnx.OnnxMl.ValueInfoProto> valueInfo_;
/**
 * <pre>
 * Information for the values in the graph. The ValueInfoProto.name's
 * must be distinct. It is optional for a value to appear in value_info list.
 * </pre>
 *
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
@java.lang.Override
public java.util.List<onnx.OnnxMl.ValueInfoProto> getValueInfoList() {
  return valueInfo_;
}
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
@java.lang.Override
public java.util.List<? extends onnx.OnnxMl.ValueInfoProtoOrBuilder>
    getValueInfoOrBuilderList() {
  return valueInfo_;
}
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
@java.lang.Override
public int getValueInfoCount() {
  return valueInfo_.size();
}
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
@java.lang.Override
public onnx.OnnxMl.ValueInfoProto getValueInfo(int index) {
  return valueInfo_.get(index);
}
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
@java.lang.Override
public onnx.OnnxMl.ValueInfoProtoOrBuilder getValueInfoOrBuilder(
    int index) {
  return valueInfo_.get(index);
}
public static final int QUANTIZATION_ANNOTATION_FIELD_NUMBER = 14;
// Parameterized element type restored (raw List in the extracted source).
private java.util.List<onnx.OnnxMl.TensorAnnotation> quantizationAnnotation_;
/**
 * <pre>
 * This field carries information to indicate the mapping among a tensor and its
 * quantization parameter tensors. For example:
 * For tensor 'a', it may have {'SCALE_TENSOR', 'a_scale'} and {'ZERO_POINT_TENSOR', 'a_zero_point'} annotated,
 * which means, tensor 'a_scale' and tensor 'a_zero_point' are scale and zero point of tensor 'a' in the model.
 * </pre>
 *
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 */
@java.lang.Override
public java.util.List<onnx.OnnxMl.TensorAnnotation> getQuantizationAnnotationList() {
  return quantizationAnnotation_;
}
/**
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 */
@java.lang.Override
public java.util.List<? extends onnx.OnnxMl.TensorAnnotationOrBuilder>
    getQuantizationAnnotationOrBuilderList() {
  return quantizationAnnotation_;
}
/**
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 */
@java.lang.Override
public int getQuantizationAnnotationCount() {
  return quantizationAnnotation_.size();
}
/**
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 */
@java.lang.Override
public onnx.OnnxMl.TensorAnnotation getQuantizationAnnotation(int index) {
  return quantizationAnnotation_.get(index);
}
/**
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 */
@java.lang.Override
public onnx.OnnxMl.TensorAnnotationOrBuilder getQuantizationAnnotationOrBuilder(
    int index) {
  return quantizationAnnotation_.get(index);
}
private byte memoizedIsInitialized = -1;
/**
 * Reports whether this message is structurally initialized; the result is
 * memoized in {@code memoizedIsInitialized} (-1 = unknown, 0 = false,
 * 1 = true). Once computed it always reports true for this message type.
 */
@java.lang.Override
public final boolean isInitialized() {
  byte cached = memoizedIsInitialized;
  if (cached != -1) {
    return cached == 1;
  }
  memoizedIsInitialized = 1;
  return true;
}
/**
 * Serializes this message to {@code output}, emitting fields in ascending
 * field-number order; unknown fields captured at parse time are written last.
 */
@java.lang.Override
public void writeTo(org.nd4j.shade.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  for (int i = 0; i < node_.size(); i++) {
    output.writeMessage(1, node_.get(i));
  }
  // String fields are only emitted when non-empty.
  if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 2, name_);
  }
  for (int i = 0; i < initializer_.size(); i++) {
    output.writeMessage(5, initializer_.get(i));
  }
  if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(docString_)) {
    org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 10, docString_);
  }
  for (int i = 0; i < input_.size(); i++) {
    output.writeMessage(11, input_.get(i));
  }
  for (int i = 0; i < output_.size(); i++) {
    output.writeMessage(12, output_.get(i));
  }
  for (int i = 0; i < valueInfo_.size(); i++) {
    output.writeMessage(13, valueInfo_.get(i));
  }
  for (int i = 0; i < quantizationAnnotation_.size(); i++) {
    output.writeMessage(14, quantizationAnnotation_.get(i));
  }
  for (int i = 0; i < sparseInitializer_.size(); i++) {
    output.writeMessage(15, sparseInitializer_.get(i));
  }
  unknownFields.writeTo(output);
}
/**
 * Computes the wire-format byte size of this message. The result is cached
 * in {@code memoizedSize} (-1 meaning "not yet computed") and must mirror
 * exactly what {@link #writeTo} emits.
 */
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  for (int i = 0; i < node_.size(); i++) {
    size += org.nd4j.shade.protobuf.CodedOutputStream
        .computeMessageSize(1, node_.get(i));
  }
  // String fields contribute size only when non-empty, matching writeTo.
  if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(2, name_);
  }
  for (int i = 0; i < initializer_.size(); i++) {
    size += org.nd4j.shade.protobuf.CodedOutputStream
        .computeMessageSize(5, initializer_.get(i));
  }
  if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(docString_)) {
    size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(10, docString_);
  }
  for (int i = 0; i < input_.size(); i++) {
    size += org.nd4j.shade.protobuf.CodedOutputStream
        .computeMessageSize(11, input_.get(i));
  }
  for (int i = 0; i < output_.size(); i++) {
    size += org.nd4j.shade.protobuf.CodedOutputStream
        .computeMessageSize(12, output_.get(i));
  }
  for (int i = 0; i < valueInfo_.size(); i++) {
    size += org.nd4j.shade.protobuf.CodedOutputStream
        .computeMessageSize(13, valueInfo_.get(i));
  }
  for (int i = 0; i < quantizationAnnotation_.size(); i++) {
    size += org.nd4j.shade.protobuf.CodedOutputStream
        .computeMessageSize(14, quantizationAnnotation_.get(i));
  }
  for (int i = 0; i < sparseInitializer_.size(); i++) {
    size += org.nd4j.shade.protobuf.CodedOutputStream
        .computeMessageSize(15, sparseInitializer_.get(i));
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
/**
 * Field-by-field structural equality, including unknown fields; non-GraphProto
 * arguments fall back to the superclass comparison.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof onnx.OnnxMl.GraphProto)) {
    return super.equals(obj);
  }
  onnx.OnnxMl.GraphProto other = (onnx.OnnxMl.GraphProto) obj;
  if (!getNodeList()
      .equals(other.getNodeList())) return false;
  if (!getName()
      .equals(other.getName())) return false;
  if (!getInitializerList()
      .equals(other.getInitializerList())) return false;
  if (!getSparseInitializerList()
      .equals(other.getSparseInitializerList())) return false;
  if (!getDocString()
      .equals(other.getDocString())) return false;
  if (!getInputList()
      .equals(other.getInputList())) return false;
  if (!getOutputList()
      .equals(other.getOutputList())) return false;
  if (!getValueInfoList()
      .equals(other.getValueInfoList())) return false;
  if (!getQuantizationAnnotationList()
      .equals(other.getQuantizationAnnotationList())) return false;
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}
/**
 * Hash consistent with {@link #equals}: folds in the field number (x37) and
 * value hash (x53) of every populated field. Cached in memoizedHashCode,
 * where 0 means "not yet computed".
 */
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  // Repeated fields participate only when non-empty.
  if (getNodeCount() > 0) {
    hash = (37 * hash) + NODE_FIELD_NUMBER;
    hash = (53 * hash) + getNodeList().hashCode();
  }
  hash = (37 * hash) + NAME_FIELD_NUMBER;
  hash = (53 * hash) + getName().hashCode();
  if (getInitializerCount() > 0) {
    hash = (37 * hash) + INITIALIZER_FIELD_NUMBER;
    hash = (53 * hash) + getInitializerList().hashCode();
  }
  if (getSparseInitializerCount() > 0) {
    hash = (37 * hash) + SPARSE_INITIALIZER_FIELD_NUMBER;
    hash = (53 * hash) + getSparseInitializerList().hashCode();
  }
  hash = (37 * hash) + DOC_STRING_FIELD_NUMBER;
  hash = (53 * hash) + getDocString().hashCode();
  if (getInputCount() > 0) {
    hash = (37 * hash) + INPUT_FIELD_NUMBER;
    hash = (53 * hash) + getInputList().hashCode();
  }
  if (getOutputCount() > 0) {
    hash = (37 * hash) + OUTPUT_FIELD_NUMBER;
    hash = (53 * hash) + getOutputList().hashCode();
  }
  if (getValueInfoCount() > 0) {
    hash = (37 * hash) + VALUE_INFO_FIELD_NUMBER;
    hash = (53 * hash) + getValueInfoList().hashCode();
  }
  if (getQuantizationAnnotationCount() > 0) {
    hash = (37 * hash) + QUANTIZATION_ANNOTATION_FIELD_NUMBER;
    hash = (53 * hash) + getQuantizationAnnotationList().hashCode();
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
// -------------------------------------------------------------------------
// Static parsing entry points. Byte-backed overloads delegate to PARSER;
// stream-backed overloads go through GeneratedMessageV3's helpers, which
// wrap IOExceptions for delimited/plain stream input.
// -------------------------------------------------------------------------
public static onnx.OnnxMl.GraphProto parseFrom(
    java.nio.ByteBuffer data)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.GraphProto parseFrom(
    java.nio.ByteBuffer data,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.GraphProto parseFrom(
    org.nd4j.shade.protobuf.ByteString data)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.GraphProto parseFrom(
    org.nd4j.shade.protobuf.ByteString data,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.GraphProto parseFrom(byte[] data)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.GraphProto parseFrom(
    byte[] data,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.GraphProto parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static onnx.OnnxMl.GraphProto parseFrom(
    java.io.InputStream input,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a leading varint length before the message bytes.
public static onnx.OnnxMl.GraphProto parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static onnx.OnnxMl.GraphProto parseDelimitedFrom(
    java.io.InputStream input,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static onnx.OnnxMl.GraphProto parseFrom(
    org.nd4j.shade.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static onnx.OnnxMl.GraphProto parseFrom(
    org.nd4j.shade.protobuf.CodedInputStream input,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
// Fresh builder derived from the default instance.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
// Builder pre-populated with the fields of {@code prototype}.
public static Builder newBuilder(onnx.OnnxMl.GraphProto prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // Avoid a needless mergeFrom when this IS the default instance.
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
    org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
* Graphs
* A graph defines the computational logic of a model and is comprised of a parameterized
* list of nodes that form a directed acyclic graph based on their inputs and outputs.
* This is the equivalent of the "network" or "graph" in many deep learning
* frameworks.
*
*
* Protobuf type {@code onnx.GraphProto}
*/
public static final class Builder extends
org.nd4j.shade.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:onnx.GraphProto)
onnx.OnnxMl.GraphProtoOrBuilder {
// Static access to the proto descriptor for onnx.GraphProto (Builder view).
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return onnx.OnnxMl.internal_static_onnx_GraphProto_descriptor;
}
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  // Same shared accessor table as the message class.
  return onnx.OnnxMl.internal_static_onnx_GraphProto_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          onnx.OnnxMl.GraphProto.class, onnx.OnnxMl.GraphProto.Builder.class);
}
// Construct using onnx.OnnxMl.GraphProto.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}
// Parented builder used when this graph is nested inside another builder.
private Builder(
    org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
  // When the runtime is configured to always use field builders, eagerly
  // create the nested RepeatedFieldBuilder for every repeated message field.
  if (org.nd4j.shade.protobuf.GeneratedMessageV3
      .alwaysUseFieldBuilders) {
    getNodeFieldBuilder();
    getInitializerFieldBuilder();
    getSparseInitializerFieldBuilder();
    getInputFieldBuilder();
    getOutputFieldBuilder();
    getValueInfoFieldBuilder();
    getQuantizationAnnotationFieldBuilder();
  }
}
/**
 * Resets every field to its default. For each repeated field, either the
 * plain list is emptied and its presence bit in bitField0_ cleared, or the
 * nested field builder (when one exists) is cleared instead.
 */
@java.lang.Override
public Builder clear() {
  super.clear();
  if (nodeBuilder_ == null) {
    node_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000001);
  } else {
    nodeBuilder_.clear();
  }
  name_ = "";
  if (initializerBuilder_ == null) {
    initializer_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000002);
  } else {
    initializerBuilder_.clear();
  }
  if (sparseInitializerBuilder_ == null) {
    sparseInitializer_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000004);
  } else {
    sparseInitializerBuilder_.clear();
  }
  docString_ = "";
  if (inputBuilder_ == null) {
    input_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000008);
  } else {
    inputBuilder_.clear();
  }
  if (outputBuilder_ == null) {
    output_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000010);
  } else {
    outputBuilder_.clear();
  }
  if (valueInfoBuilder_ == null) {
    valueInfo_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000020);
  } else {
    valueInfoBuilder_.clear();
  }
  if (quantizationAnnotationBuilder_ == null) {
    quantizationAnnotation_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000040);
  } else {
    quantizationAnnotationBuilder_.clear();
  }
  return this;
}
@java.lang.Override
public org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptorForType() {
// Same descriptor as the message class; required by the Message.Builder API.
return onnx.OnnxMl.internal_static_onnx_GraphProto_descriptor;
}
@java.lang.Override
public onnx.OnnxMl.GraphProto getDefaultInstanceForType() {
return onnx.OnnxMl.GraphProto.getDefaultInstance();
}
@java.lang.Override
public onnx.OnnxMl.GraphProto build() {
  // Materialize the message, then fail fast if it is not fully initialized.
  final onnx.OnnxMl.GraphProto result = buildPartial();
  if (result.isInitialized()) {
    return result;
  }
  throw newUninitializedMessageException(result);
}
@java.lang.Override
public onnx.OnnxMl.GraphProto buildPartial() {
// Transfers this builder's state into a new message without checking
// initialization. Each repeated field held locally is frozen (wrapped
// unmodifiable) and its ownership bit cleared, so a later builder mutation
// copies the list instead of aliasing the message's; fields managed by a
// RepeatedFieldBuilderV3 are built by that builder instead.
onnx.OnnxMl.GraphProto result = new onnx.OnnxMl.GraphProto(this);
// Snapshot kept by the code generator; unused for this proto3 message.
int from_bitField0_ = bitField0_;
if (nodeBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
node_ = java.util.Collections.unmodifiableList(node_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.node_ = node_;
} else {
result.node_ = nodeBuilder_.build();
}
result.name_ = name_;
if (initializerBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)) {
initializer_ = java.util.Collections.unmodifiableList(initializer_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.initializer_ = initializer_;
} else {
result.initializer_ = initializerBuilder_.build();
}
if (sparseInitializerBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)) {
sparseInitializer_ = java.util.Collections.unmodifiableList(sparseInitializer_);
bitField0_ = (bitField0_ & ~0x00000004);
}
result.sparseInitializer_ = sparseInitializer_;
} else {
result.sparseInitializer_ = sparseInitializerBuilder_.build();
}
result.docString_ = docString_;
if (inputBuilder_ == null) {
if (((bitField0_ & 0x00000008) != 0)) {
input_ = java.util.Collections.unmodifiableList(input_);
bitField0_ = (bitField0_ & ~0x00000008);
}
result.input_ = input_;
} else {
result.input_ = inputBuilder_.build();
}
if (outputBuilder_ == null) {
if (((bitField0_ & 0x00000010) != 0)) {
output_ = java.util.Collections.unmodifiableList(output_);
bitField0_ = (bitField0_ & ~0x00000010);
}
result.output_ = output_;
} else {
result.output_ = outputBuilder_.build();
}
if (valueInfoBuilder_ == null) {
if (((bitField0_ & 0x00000020) != 0)) {
valueInfo_ = java.util.Collections.unmodifiableList(valueInfo_);
bitField0_ = (bitField0_ & ~0x00000020);
}
result.valueInfo_ = valueInfo_;
} else {
result.valueInfo_ = valueInfoBuilder_.build();
}
if (quantizationAnnotationBuilder_ == null) {
if (((bitField0_ & 0x00000040) != 0)) {
quantizationAnnotation_ = java.util.Collections.unmodifiableList(quantizationAnnotation_);
bitField0_ = (bitField0_ & ~0x00000040);
}
result.quantizationAnnotation_ = quantizationAnnotation_;
} else {
result.quantizationAnnotation_ = quantizationAnnotationBuilder_.build();
}
onBuilt();
return result;
}
// Standard GeneratedMessageV3.Builder delegations; kept explicit so the
// generated API surface is stable across protobuf runtime versions.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.nd4j.shade.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.nd4j.shade.protobuf.Message other) {
  // Prefer the type-safe merge; fall back to reflective merging otherwise.
  if (other instanceof onnx.OnnxMl.GraphProto) {
    return mergeFrom((onnx.OnnxMl.GraphProto) other);
  }
  super.mergeFrom(other);
  return this;
}
// Merges another GraphProto into this builder. For each repeated field there
// are two regimes: without a field builder, the other message's (immutable)
// list is adopted directly when ours is empty, else elements are appended to
// a privately-owned copy; with a field builder, an empty builder is disposed
// and the list adopted, else the messages are appended to the builder.
public Builder mergeFrom(onnx.OnnxMl.GraphProto other) {
if (other == onnx.OnnxMl.GraphProto.getDefaultInstance()) return this;
// ---- repeated node = 1 ----
if (nodeBuilder_ == null) {
if (!other.node_.isEmpty()) {
if (node_.isEmpty()) {
node_ = other.node_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureNodeIsMutable();
node_.addAll(other.node_);
}
onChanged();
}
} else {
if (!other.node_.isEmpty()) {
if (nodeBuilder_.isEmpty()) {
nodeBuilder_.dispose();
nodeBuilder_ = null;
node_ = other.node_;
bitField0_ = (bitField0_ & ~0x00000001);
nodeBuilder_ =
org.nd4j.shade.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getNodeFieldBuilder() : null;
} else {
nodeBuilder_.addAllMessages(other.node_);
}
}
}
// ---- string name = 2 (proto3: only non-empty values overwrite) ----
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
// ---- repeated initializer = 5 ----
if (initializerBuilder_ == null) {
if (!other.initializer_.isEmpty()) {
if (initializer_.isEmpty()) {
initializer_ = other.initializer_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureInitializerIsMutable();
initializer_.addAll(other.initializer_);
}
onChanged();
}
} else {
if (!other.initializer_.isEmpty()) {
if (initializerBuilder_.isEmpty()) {
initializerBuilder_.dispose();
initializerBuilder_ = null;
initializer_ = other.initializer_;
bitField0_ = (bitField0_ & ~0x00000002);
initializerBuilder_ =
org.nd4j.shade.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getInitializerFieldBuilder() : null;
} else {
initializerBuilder_.addAllMessages(other.initializer_);
}
}
}
// ---- repeated sparse_initializer = 15 ----
if (sparseInitializerBuilder_ == null) {
if (!other.sparseInitializer_.isEmpty()) {
if (sparseInitializer_.isEmpty()) {
sparseInitializer_ = other.sparseInitializer_;
bitField0_ = (bitField0_ & ~0x00000004);
} else {
ensureSparseInitializerIsMutable();
sparseInitializer_.addAll(other.sparseInitializer_);
}
onChanged();
}
} else {
if (!other.sparseInitializer_.isEmpty()) {
if (sparseInitializerBuilder_.isEmpty()) {
sparseInitializerBuilder_.dispose();
sparseInitializerBuilder_ = null;
sparseInitializer_ = other.sparseInitializer_;
bitField0_ = (bitField0_ & ~0x00000004);
sparseInitializerBuilder_ =
org.nd4j.shade.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getSparseInitializerFieldBuilder() : null;
} else {
sparseInitializerBuilder_.addAllMessages(other.sparseInitializer_);
}
}
}
// ---- string doc_string ----
if (!other.getDocString().isEmpty()) {
docString_ = other.docString_;
onChanged();
}
// ---- repeated input ----
if (inputBuilder_ == null) {
if (!other.input_.isEmpty()) {
if (input_.isEmpty()) {
input_ = other.input_;
bitField0_ = (bitField0_ & ~0x00000008);
} else {
ensureInputIsMutable();
input_.addAll(other.input_);
}
onChanged();
}
} else {
if (!other.input_.isEmpty()) {
if (inputBuilder_.isEmpty()) {
inputBuilder_.dispose();
inputBuilder_ = null;
input_ = other.input_;
bitField0_ = (bitField0_ & ~0x00000008);
inputBuilder_ =
org.nd4j.shade.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getInputFieldBuilder() : null;
} else {
inputBuilder_.addAllMessages(other.input_);
}
}
}
// ---- repeated output ----
if (outputBuilder_ == null) {
if (!other.output_.isEmpty()) {
if (output_.isEmpty()) {
output_ = other.output_;
bitField0_ = (bitField0_ & ~0x00000010);
} else {
ensureOutputIsMutable();
output_.addAll(other.output_);
}
onChanged();
}
} else {
if (!other.output_.isEmpty()) {
if (outputBuilder_.isEmpty()) {
outputBuilder_.dispose();
outputBuilder_ = null;
output_ = other.output_;
bitField0_ = (bitField0_ & ~0x00000010);
outputBuilder_ =
org.nd4j.shade.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getOutputFieldBuilder() : null;
} else {
outputBuilder_.addAllMessages(other.output_);
}
}
}
// ---- repeated value_info ----
if (valueInfoBuilder_ == null) {
if (!other.valueInfo_.isEmpty()) {
if (valueInfo_.isEmpty()) {
valueInfo_ = other.valueInfo_;
bitField0_ = (bitField0_ & ~0x00000020);
} else {
ensureValueInfoIsMutable();
valueInfo_.addAll(other.valueInfo_);
}
onChanged();
}
} else {
if (!other.valueInfo_.isEmpty()) {
if (valueInfoBuilder_.isEmpty()) {
valueInfoBuilder_.dispose();
valueInfoBuilder_ = null;
valueInfo_ = other.valueInfo_;
bitField0_ = (bitField0_ & ~0x00000020);
valueInfoBuilder_ =
org.nd4j.shade.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getValueInfoFieldBuilder() : null;
} else {
valueInfoBuilder_.addAllMessages(other.valueInfo_);
}
}
}
// ---- repeated quantization_annotation ----
if (quantizationAnnotationBuilder_ == null) {
if (!other.quantizationAnnotation_.isEmpty()) {
if (quantizationAnnotation_.isEmpty()) {
quantizationAnnotation_ = other.quantizationAnnotation_;
bitField0_ = (bitField0_ & ~0x00000040);
} else {
ensureQuantizationAnnotationIsMutable();
quantizationAnnotation_.addAll(other.quantizationAnnotation_);
}
onChanged();
}
} else {
if (!other.quantizationAnnotation_.isEmpty()) {
if (quantizationAnnotationBuilder_.isEmpty()) {
quantizationAnnotationBuilder_.dispose();
quantizationAnnotationBuilder_ = null;
quantizationAnnotation_ = other.quantizationAnnotation_;
bitField0_ = (bitField0_ & ~0x00000040);
quantizationAnnotationBuilder_ =
org.nd4j.shade.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getQuantizationAnnotationFieldBuilder() : null;
} else {
quantizationAnnotationBuilder_.addAllMessages(other.quantizationAnnotation_);
}
}
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// proto3 messages have no required fields, so a GraphProto is always initialized.
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
// Parses a GraphProto from the stream and merges it into this builder.
// On a parse failure the partially parsed message is still merged (via the
// finally block) before the error is rethrown as an IOException.
onnx.OnnxMl.GraphProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (onnx.OnnxMl.GraphProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Ownership bits: bit N set means the corresponding repeated-field list is a
// private mutable copy (safe to modify in place).
private int bitField0_;

// ---- repeated .onnx.NodeProto node = 1 ------------------------------------
// NOTE: the generic type parameters below were missing (stripped to raw types)
// and are restored; raw types here do not compile against the typed accessors.

private java.util.List<onnx.OnnxMl.NodeProto> node_ =
  java.util.Collections.emptyList();
/** Copies {@code node_} into a privately owned ArrayList before the first in-place edit. */
private void ensureNodeIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    node_ = new java.util.ArrayList<onnx.OnnxMl.NodeProto>(node_);
    bitField0_ |= 0x00000001;
  }
}

private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
    onnx.OnnxMl.NodeProto, onnx.OnnxMl.NodeProto.Builder, onnx.OnnxMl.NodeProtoOrBuilder> nodeBuilder_;
/**
 * The nodes in the graph, sorted topologically.
 *
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
public java.util.List<onnx.OnnxMl.NodeProto> getNodeList() {
  if (nodeBuilder_ == null) {
    return java.util.Collections.unmodifiableList(node_);
  } else {
    return nodeBuilder_.getMessageList();
  }
}
/**
 * Number of nodes in the graph.
 *
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
public int getNodeCount() {
  if (nodeBuilder_ == null) {
    return node_.size();
  } else {
    return nodeBuilder_.getCount();
  }
}
/**
 * Returns the node at {@code index}.
 *
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
public onnx.OnnxMl.NodeProto getNode(int index) {
  if (nodeBuilder_ == null) {
    return node_.get(index);
  } else {
    return nodeBuilder_.getMessage(index);
  }
}
/**
 * Replaces the node at {@code index}.
 *
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
public Builder setNode(
    int index, onnx.OnnxMl.NodeProto value) {
  if (nodeBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureNodeIsMutable();
    node_.set(index, value);
    onChanged();
  } else {
    nodeBuilder_.setMessage(index, value);
  }
  return this;
}
/**
 * Replaces the node at {@code index} with the built value.
 *
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
public Builder setNode(
    int index, onnx.OnnxMl.NodeProto.Builder builderForValue) {
  if (nodeBuilder_ == null) {
    ensureNodeIsMutable();
    node_.set(index, builderForValue.build());
    onChanged();
  } else {
    nodeBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * Appends a node.
 *
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
public Builder addNode(onnx.OnnxMl.NodeProto value) {
  if (nodeBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureNodeIsMutable();
    node_.add(value);
    onChanged();
  } else {
    nodeBuilder_.addMessage(value);
  }
  return this;
}
/**
 * Inserts a node at {@code index}.
 *
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
public Builder addNode(
    int index, onnx.OnnxMl.NodeProto value) {
  if (nodeBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureNodeIsMutable();
    node_.add(index, value);
    onChanged();
  } else {
    nodeBuilder_.addMessage(index, value);
  }
  return this;
}
/**
 * Appends the built node.
 *
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
public Builder addNode(
    onnx.OnnxMl.NodeProto.Builder builderForValue) {
  if (nodeBuilder_ == null) {
    ensureNodeIsMutable();
    node_.add(builderForValue.build());
    onChanged();
  } else {
    nodeBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
 * Inserts the built node at {@code index}.
 *
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
public Builder addNode(
    int index, onnx.OnnxMl.NodeProto.Builder builderForValue) {
  if (nodeBuilder_ == null) {
    ensureNodeIsMutable();
    node_.add(index, builderForValue.build());
    onChanged();
  } else {
    nodeBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * Appends all given nodes.
 *
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
public Builder addAllNode(
    java.lang.Iterable<? extends onnx.OnnxMl.NodeProto> values) {
  if (nodeBuilder_ == null) {
    ensureNodeIsMutable();
    org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
        values, node_);
    onChanged();
  } else {
    nodeBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 * Removes all nodes.
 *
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
public Builder clearNode() {
  if (nodeBuilder_ == null) {
    node_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
  } else {
    nodeBuilder_.clear();
  }
  return this;
}
/**
 * Removes the node at {@code index}.
 *
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
public Builder removeNode(int index) {
  if (nodeBuilder_ == null) {
    ensureNodeIsMutable();
    node_.remove(index);
    onChanged();
  } else {
    nodeBuilder_.remove(index);
  }
  return this;
}
/**
 * Returns a mutable builder for the node at {@code index} (forces field-builder mode).
 *
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
public onnx.OnnxMl.NodeProto.Builder getNodeBuilder(
    int index) {
  return getNodeFieldBuilder().getBuilder(index);
}
/**
 * Read-only view of the node at {@code index}.
 *
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
public onnx.OnnxMl.NodeProtoOrBuilder getNodeOrBuilder(
    int index) {
  if (nodeBuilder_ == null) {
    return node_.get(index);
  } else {
    return nodeBuilder_.getMessageOrBuilder(index);
  }
}
/**
 * Read-only views of all nodes.
 *
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
public java.util.List<? extends onnx.OnnxMl.NodeProtoOrBuilder>
    getNodeOrBuilderList() {
  if (nodeBuilder_ != null) {
    return nodeBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(node_);
  }
}
/**
 * Appends a new default node and returns its builder.
 *
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
public onnx.OnnxMl.NodeProto.Builder addNodeBuilder() {
  return getNodeFieldBuilder().addBuilder(
      onnx.OnnxMl.NodeProto.getDefaultInstance());
}
/**
 * Inserts a new default node at {@code index} and returns its builder.
 *
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
public onnx.OnnxMl.NodeProto.Builder addNodeBuilder(
    int index) {
  return getNodeFieldBuilder().addBuilder(
      index, onnx.OnnxMl.NodeProto.getDefaultInstance());
}
/**
 * Builders for all nodes (forces field-builder mode).
 *
 * <code>repeated .onnx.NodeProto node = 1;</code>
 */
public java.util.List<onnx.OnnxMl.NodeProto.Builder>
    getNodeBuilderList() {
  return getNodeFieldBuilder().getBuilderList();
}
// Lazily creates the field builder, handing it ownership of node_.
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
    onnx.OnnxMl.NodeProto, onnx.OnnxMl.NodeProto.Builder, onnx.OnnxMl.NodeProtoOrBuilder>
    getNodeFieldBuilder() {
  if (nodeBuilder_ == null) {
    nodeBuilder_ = new org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
        onnx.OnnxMl.NodeProto, onnx.OnnxMl.NodeProto.Builder, onnx.OnnxMl.NodeProtoOrBuilder>(
            node_,
            ((bitField0_ & 0x00000001) != 0),
            getParentForChildren(),
            isClean());
    node_ = null;  // ownership transferred to the field builder
  }
  return nodeBuilder_;
}
// Lazily-decoded `name`: holds either a java.lang.String or a ByteString
// (raw UTF-8); the decoded/encoded form is memoized on first access.
private java.lang.Object name_ = "";
/**
 * The name of the graph.
 *
 * <code>string name = 2;</code>
 * @return The name.
 */
public java.lang.String getName() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  }
  // Cached as ByteString: decode once and memoize the String form.
  java.lang.String decoded =
      ((org.nd4j.shade.protobuf.ByteString) ref).toStringUtf8();
  name_ = decoded;
  return decoded;
}
/**
 * The name of the graph.
 *
 * <code>string name = 2;</code>
 * @return The bytes for name.
 */
public org.nd4j.shade.protobuf.ByteString getNameBytes() {
  java.lang.Object ref = name_;
  if (ref instanceof org.nd4j.shade.protobuf.ByteString) {
    return (org.nd4j.shade.protobuf.ByteString) ref;
  }
  // Cached as String: encode once and memoize the ByteString form.
  org.nd4j.shade.protobuf.ByteString encoded =
      org.nd4j.shade.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  name_ = encoded;
  return encoded;
}
/**
 * Sets the name of the graph.
 *
 * <code>string name = 2;</code>
 * @param value The name to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setName(
    java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  name_ = value;
  onChanged();
  return this;
}
/**
 * Resets the name to its default (empty).
 *
 * <code>string name = 2;</code>
 * @return This builder for chaining.
 */
public Builder clearName() {
  name_ = getDefaultInstance().getName();
  onChanged();
  return this;
}
/**
 * Sets the name from raw UTF-8 bytes.
 *
 * <code>string name = 2;</code>
 * @param value The bytes for name to set; must be valid UTF-8 and non-null.
 * @return This builder for chaining.
 */
public Builder setNameBytes(
    org.nd4j.shade.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  name_ = value;
  onChanged();
  return this;
}
// ---- repeated .onnx.TensorProto initializer = 5 ----------------------------
// A list of named tensor values, used to specify constant inputs of the graph.
// Each initializer (TensorProto as well as SparseTensorProto) MUST have a name,
// unique across both initializer and sparse_initializer; the name MAY also
// appear in the input list.
// NOTE: the generic type parameters below were missing (stripped to raw types)
// and are restored; raw types here do not compile against the typed accessors.

private java.util.List<onnx.OnnxMl.TensorProto> initializer_ =
  java.util.Collections.emptyList();
/** Copies {@code initializer_} into a privately owned ArrayList before the first in-place edit. */
private void ensureInitializerIsMutable() {
  if (!((bitField0_ & 0x00000002) != 0)) {
    initializer_ = new java.util.ArrayList<onnx.OnnxMl.TensorProto>(initializer_);
    bitField0_ |= 0x00000002;
  }
}

private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
    onnx.OnnxMl.TensorProto, onnx.OnnxMl.TensorProto.Builder, onnx.OnnxMl.TensorProtoOrBuilder> initializerBuilder_;
/**
 * Named constant tensors of the graph (see field comment above).
 *
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
public java.util.List<onnx.OnnxMl.TensorProto> getInitializerList() {
  if (initializerBuilder_ == null) {
    return java.util.Collections.unmodifiableList(initializer_);
  } else {
    return initializerBuilder_.getMessageList();
  }
}
/**
 * Number of initializers.
 *
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
public int getInitializerCount() {
  if (initializerBuilder_ == null) {
    return initializer_.size();
  } else {
    return initializerBuilder_.getCount();
  }
}
/**
 * Returns the initializer at {@code index}.
 *
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
public onnx.OnnxMl.TensorProto getInitializer(int index) {
  if (initializerBuilder_ == null) {
    return initializer_.get(index);
  } else {
    return initializerBuilder_.getMessage(index);
  }
}
/**
 * Replaces the initializer at {@code index}.
 *
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
public Builder setInitializer(
    int index, onnx.OnnxMl.TensorProto value) {
  if (initializerBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureInitializerIsMutable();
    initializer_.set(index, value);
    onChanged();
  } else {
    initializerBuilder_.setMessage(index, value);
  }
  return this;
}
/**
 * Replaces the initializer at {@code index} with the built value.
 *
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
public Builder setInitializer(
    int index, onnx.OnnxMl.TensorProto.Builder builderForValue) {
  if (initializerBuilder_ == null) {
    ensureInitializerIsMutable();
    initializer_.set(index, builderForValue.build());
    onChanged();
  } else {
    initializerBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * Appends an initializer.
 *
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
public Builder addInitializer(onnx.OnnxMl.TensorProto value) {
  if (initializerBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureInitializerIsMutable();
    initializer_.add(value);
    onChanged();
  } else {
    initializerBuilder_.addMessage(value);
  }
  return this;
}
/**
 * Inserts an initializer at {@code index}.
 *
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
public Builder addInitializer(
    int index, onnx.OnnxMl.TensorProto value) {
  if (initializerBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureInitializerIsMutable();
    initializer_.add(index, value);
    onChanged();
  } else {
    initializerBuilder_.addMessage(index, value);
  }
  return this;
}
/**
 * Appends the built initializer.
 *
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
public Builder addInitializer(
    onnx.OnnxMl.TensorProto.Builder builderForValue) {
  if (initializerBuilder_ == null) {
    ensureInitializerIsMutable();
    initializer_.add(builderForValue.build());
    onChanged();
  } else {
    initializerBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
 * Inserts the built initializer at {@code index}.
 *
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
public Builder addInitializer(
    int index, onnx.OnnxMl.TensorProto.Builder builderForValue) {
  if (initializerBuilder_ == null) {
    ensureInitializerIsMutable();
    initializer_.add(index, builderForValue.build());
    onChanged();
  } else {
    initializerBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * Appends all given initializers.
 *
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
public Builder addAllInitializer(
    java.lang.Iterable<? extends onnx.OnnxMl.TensorProto> values) {
  if (initializerBuilder_ == null) {
    ensureInitializerIsMutable();
    org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
        values, initializer_);
    onChanged();
  } else {
    initializerBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 * Removes all initializers.
 *
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
public Builder clearInitializer() {
  if (initializerBuilder_ == null) {
    initializer_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000002);
    onChanged();
  } else {
    initializerBuilder_.clear();
  }
  return this;
}
/**
 * Removes the initializer at {@code index}.
 *
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
public Builder removeInitializer(int index) {
  if (initializerBuilder_ == null) {
    ensureInitializerIsMutable();
    initializer_.remove(index);
    onChanged();
  } else {
    initializerBuilder_.remove(index);
  }
  return this;
}
/**
 * Returns a mutable builder for the initializer at {@code index} (forces field-builder mode).
 *
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
public onnx.OnnxMl.TensorProto.Builder getInitializerBuilder(
    int index) {
  return getInitializerFieldBuilder().getBuilder(index);
}
/**
 * Read-only view of the initializer at {@code index}.
 *
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
public onnx.OnnxMl.TensorProtoOrBuilder getInitializerOrBuilder(
    int index) {
  if (initializerBuilder_ == null) {
    return initializer_.get(index);
  } else {
    return initializerBuilder_.getMessageOrBuilder(index);
  }
}
/**
 * Read-only views of all initializers.
 *
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
public java.util.List<? extends onnx.OnnxMl.TensorProtoOrBuilder>
    getInitializerOrBuilderList() {
  if (initializerBuilder_ != null) {
    return initializerBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(initializer_);
  }
}
/**
 * Appends a new default initializer and returns its builder.
 *
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
public onnx.OnnxMl.TensorProto.Builder addInitializerBuilder() {
  return getInitializerFieldBuilder().addBuilder(
      onnx.OnnxMl.TensorProto.getDefaultInstance());
}
/**
 * Inserts a new default initializer at {@code index} and returns its builder.
 *
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
public onnx.OnnxMl.TensorProto.Builder addInitializerBuilder(
    int index) {
  return getInitializerFieldBuilder().addBuilder(
      index, onnx.OnnxMl.TensorProto.getDefaultInstance());
}
/**
 * Builders for all initializers (forces field-builder mode).
 *
 * <code>repeated .onnx.TensorProto initializer = 5;</code>
 */
public java.util.List<onnx.OnnxMl.TensorProto.Builder>
    getInitializerBuilderList() {
  return getInitializerFieldBuilder().getBuilderList();
}
// Lazily creates the field builder, handing it ownership of initializer_.
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
    onnx.OnnxMl.TensorProto, onnx.OnnxMl.TensorProto.Builder, onnx.OnnxMl.TensorProtoOrBuilder>
    getInitializerFieldBuilder() {
  if (initializerBuilder_ == null) {
    initializerBuilder_ = new org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
        onnx.OnnxMl.TensorProto, onnx.OnnxMl.TensorProto.Builder, onnx.OnnxMl.TensorProtoOrBuilder>(
            initializer_,
            ((bitField0_ & 0x00000002) != 0),
            getParentForChildren(),
            isClean());
    initializer_ = null;  // ownership transferred to the field builder
  }
  return initializerBuilder_;
}
// ---- repeated .onnx.SparseTensorProto sparse_initializer = 15 --------------
// Initializers (see `initializer` above) stored in sparse format.
// NOTE: the generic type parameters below were missing (stripped to raw types)
// and are restored; raw types here do not compile against the typed accessors.

private java.util.List<onnx.OnnxMl.SparseTensorProto> sparseInitializer_ =
  java.util.Collections.emptyList();
/** Copies {@code sparseInitializer_} into a privately owned ArrayList before the first in-place edit. */
private void ensureSparseInitializerIsMutable() {
  if (!((bitField0_ & 0x00000004) != 0)) {
    sparseInitializer_ = new java.util.ArrayList<onnx.OnnxMl.SparseTensorProto>(sparseInitializer_);
    bitField0_ |= 0x00000004;
  }
}

private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
    onnx.OnnxMl.SparseTensorProto, onnx.OnnxMl.SparseTensorProto.Builder, onnx.OnnxMl.SparseTensorProtoOrBuilder> sparseInitializerBuilder_;
/**
 * Initializers (see above) stored in sparse format.
 *
 * <code>repeated .onnx.SparseTensorProto sparse_initializer = 15;</code>
 */
public java.util.List<onnx.OnnxMl.SparseTensorProto> getSparseInitializerList() {
  if (sparseInitializerBuilder_ == null) {
    return java.util.Collections.unmodifiableList(sparseInitializer_);
  } else {
    return sparseInitializerBuilder_.getMessageList();
  }
}
/**
 * Number of sparse initializers.
 *
 * <code>repeated .onnx.SparseTensorProto sparse_initializer = 15;</code>
 */
public int getSparseInitializerCount() {
  if (sparseInitializerBuilder_ == null) {
    return sparseInitializer_.size();
  } else {
    return sparseInitializerBuilder_.getCount();
  }
}
/**
 * Returns the sparse initializer at {@code index}.
 *
 * <code>repeated .onnx.SparseTensorProto sparse_initializer = 15;</code>
 */
public onnx.OnnxMl.SparseTensorProto getSparseInitializer(int index) {
  if (sparseInitializerBuilder_ == null) {
    return sparseInitializer_.get(index);
  } else {
    return sparseInitializerBuilder_.getMessage(index);
  }
}
/**
 * Replaces the sparse initializer at {@code index}.
 *
 * <code>repeated .onnx.SparseTensorProto sparse_initializer = 15;</code>
 */
public Builder setSparseInitializer(
    int index, onnx.OnnxMl.SparseTensorProto value) {
  if (sparseInitializerBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureSparseInitializerIsMutable();
    sparseInitializer_.set(index, value);
    onChanged();
  } else {
    sparseInitializerBuilder_.setMessage(index, value);
  }
  return this;
}
/**
 * Replaces the sparse initializer at {@code index} with the built value.
 *
 * <code>repeated .onnx.SparseTensorProto sparse_initializer = 15;</code>
 */
public Builder setSparseInitializer(
    int index, onnx.OnnxMl.SparseTensorProto.Builder builderForValue) {
  if (sparseInitializerBuilder_ == null) {
    ensureSparseInitializerIsMutable();
    sparseInitializer_.set(index, builderForValue.build());
    onChanged();
  } else {
    sparseInitializerBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * Appends a sparse initializer.
 *
 * <code>repeated .onnx.SparseTensorProto sparse_initializer = 15;</code>
 */
public Builder addSparseInitializer(onnx.OnnxMl.SparseTensorProto value) {
  if (sparseInitializerBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureSparseInitializerIsMutable();
    sparseInitializer_.add(value);
    onChanged();
  } else {
    sparseInitializerBuilder_.addMessage(value);
  }
  return this;
}
/**
 * Inserts a sparse initializer at {@code index}.
 *
 * <code>repeated .onnx.SparseTensorProto sparse_initializer = 15;</code>
 */
public Builder addSparseInitializer(
    int index, onnx.OnnxMl.SparseTensorProto value) {
  if (sparseInitializerBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureSparseInitializerIsMutable();
    sparseInitializer_.add(index, value);
    onChanged();
  } else {
    sparseInitializerBuilder_.addMessage(index, value);
  }
  return this;
}
/**
 * Appends the built sparse initializer.
 *
 * <code>repeated .onnx.SparseTensorProto sparse_initializer = 15;</code>
 */
public Builder addSparseInitializer(
    onnx.OnnxMl.SparseTensorProto.Builder builderForValue) {
  if (sparseInitializerBuilder_ == null) {
    ensureSparseInitializerIsMutable();
    sparseInitializer_.add(builderForValue.build());
    onChanged();
  } else {
    sparseInitializerBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
*
* Initializers (see above) stored in sparse format.
*
*
* repeated .onnx.SparseTensorProto sparse_initializer = 15;
*/
public Builder addSparseInitializer(
int index, onnx.OnnxMl.SparseTensorProto.Builder builderForValue) {
if (sparseInitializerBuilder_ == null) {
ensureSparseInitializerIsMutable();
sparseInitializer_.add(index, builderForValue.build());
onChanged();
} else {
sparseInitializerBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
* Initializers (see above) stored in sparse format.
*
*
* repeated .onnx.SparseTensorProto sparse_initializer = 15;
*/
public Builder addAllSparseInitializer(
java.lang.Iterable values) {
if (sparseInitializerBuilder_ == null) {
ensureSparseInitializerIsMutable();
org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
values, sparseInitializer_);
onChanged();
} else {
sparseInitializerBuilder_.addAllMessages(values);
}
return this;
}
/**
*
* Initializers (see above) stored in sparse format.
*
*
* repeated .onnx.SparseTensorProto sparse_initializer = 15;
*/
public Builder clearSparseInitializer() {
if (sparseInitializerBuilder_ == null) {
sparseInitializer_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
} else {
sparseInitializerBuilder_.clear();
}
return this;
}
/**
*
* Initializers (see above) stored in sparse format.
*
*
* repeated .onnx.SparseTensorProto sparse_initializer = 15;
*/
public Builder removeSparseInitializer(int index) {
if (sparseInitializerBuilder_ == null) {
ensureSparseInitializerIsMutable();
sparseInitializer_.remove(index);
onChanged();
} else {
sparseInitializerBuilder_.remove(index);
}
return this;
}
/**
*
* Initializers (see above) stored in sparse format.
*
*
* repeated .onnx.SparseTensorProto sparse_initializer = 15;
*/
public onnx.OnnxMl.SparseTensorProto.Builder getSparseInitializerBuilder(
int index) {
return getSparseInitializerFieldBuilder().getBuilder(index);
}
/**
*
* Initializers (see above) stored in sparse format.
*
*
* repeated .onnx.SparseTensorProto sparse_initializer = 15;
*/
public onnx.OnnxMl.SparseTensorProtoOrBuilder getSparseInitializerOrBuilder(
int index) {
if (sparseInitializerBuilder_ == null) {
return sparseInitializer_.get(index); } else {
return sparseInitializerBuilder_.getMessageOrBuilder(index);
}
}
/**
*
* Initializers (see above) stored in sparse format.
*
*
* repeated .onnx.SparseTensorProto sparse_initializer = 15;
*/
public java.util.List
getSparseInitializerOrBuilderList() {
if (sparseInitializerBuilder_ != null) {
return sparseInitializerBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(sparseInitializer_);
}
}
/**
*
* Initializers (see above) stored in sparse format.
*
*
* repeated .onnx.SparseTensorProto sparse_initializer = 15;
*/
public onnx.OnnxMl.SparseTensorProto.Builder addSparseInitializerBuilder() {
return getSparseInitializerFieldBuilder().addBuilder(
onnx.OnnxMl.SparseTensorProto.getDefaultInstance());
}
/**
*
* Initializers (see above) stored in sparse format.
*
*
* repeated .onnx.SparseTensorProto sparse_initializer = 15;
*/
public onnx.OnnxMl.SparseTensorProto.Builder addSparseInitializerBuilder(
int index) {
return getSparseInitializerFieldBuilder().addBuilder(
index, onnx.OnnxMl.SparseTensorProto.getDefaultInstance());
}
/**
*
* Initializers (see above) stored in sparse format.
*
*
* repeated .onnx.SparseTensorProto sparse_initializer = 15;
*/
public java.util.List
getSparseInitializerBuilderList() {
return getSparseInitializerFieldBuilder().getBuilderList();
}
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
onnx.OnnxMl.SparseTensorProto, onnx.OnnxMl.SparseTensorProto.Builder, onnx.OnnxMl.SparseTensorProtoOrBuilder>
getSparseInitializerFieldBuilder() {
if (sparseInitializerBuilder_ == null) {
sparseInitializerBuilder_ = new org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
onnx.OnnxMl.SparseTensorProto, onnx.OnnxMl.SparseTensorProto.Builder, onnx.OnnxMl.SparseTensorProtoOrBuilder>(
sparseInitializer_,
((bitField0_ & 0x00000004) != 0),
getParentForChildren(),
isClean());
sparseInitializer_ = null;
}
return sparseInitializerBuilder_;
}
// Stored as Object so the value can be held either as a decoded
// java.lang.String or as the raw ByteString off the wire; the getters
// below lazily convert and cache in whichever form is requested.
private java.lang.Object docString_ = "";
/**
 * <pre>
 * A human-readable documentation for this graph. Markdown is allowed.
 * </pre>
 *
 * <code>string doc_string = 10;</code>
 * @return The docString.
 */
public java.lang.String getDocString() {
  java.lang.Object ref = docString_;
  if (!(ref instanceof java.lang.String)) {
    // Field still holds the wire-format ByteString: decode once and
    // cache the String so subsequent calls are free.
    org.nd4j.shade.protobuf.ByteString bs =
        (org.nd4j.shade.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    docString_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 * <pre>
 * A human-readable documentation for this graph. Markdown is allowed.
 * </pre>
 *
 * <code>string doc_string = 10;</code>
 * @return The bytes for docString.
 */
public org.nd4j.shade.protobuf.ByteString
    getDocStringBytes() {
  java.lang.Object ref = docString_;
  if (ref instanceof String) {
    // Field holds a decoded String: encode once and cache the ByteString.
    org.nd4j.shade.protobuf.ByteString b =
        org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    docString_ = b;
    return b;
  } else {
    return (org.nd4j.shade.protobuf.ByteString) ref;
  }
}
/**
 * <pre>
 * A human-readable documentation for this graph. Markdown is allowed.
 * </pre>
 *
 * <code>string doc_string = 10;</code>
 * @param value The docString to set.
 * @return This builder for chaining.
 */
public Builder setDocString(
    java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  docString_ = value;
  onChanged();
  return this;
}
/**
 * <pre>
 * A human-readable documentation for this graph. Markdown is allowed.
 * </pre>
 *
 * <code>string doc_string = 10;</code>
 * @return This builder for chaining.
 */
public Builder clearDocString() {
  // Reset to the proto3 default ("") taken from the default instance.
  docString_ = getDefaultInstance().getDocString();
  onChanged();
  return this;
}
/**
 * <pre>
 * A human-readable documentation for this graph. Markdown is allowed.
 * </pre>
 *
 * <code>string doc_string = 10;</code>
 * @param value The bytes for docString to set.
 * @return This builder for chaining.
 */
public Builder setDocStringBytes(
    org.nd4j.shade.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // proto3 strings must be valid UTF-8; reject malformed bytes up front.
  checkByteStringIsUtf8(value);
  docString_ = value;
  onChanged();
  return this;
}
// ---- repeated .onnx.ValueInfoProto input = 11 ----
// Standard protoc repeated-message-field accessor family. Generic type
// parameters restored (they were stripped to raw types in this copy).
private java.util.List<onnx.OnnxMl.ValueInfoProto> input_ =
  java.util.Collections.emptyList();
private void ensureInputIsMutable() {
  if (!((bitField0_ & 0x00000008) != 0)) {
    // Copy-on-write: swap in a private mutable copy before the first mutation.
    input_ = new java.util.ArrayList<onnx.OnnxMl.ValueInfoProto>(input_);
    bitField0_ |= 0x00000008;
  }
}
// Lazily-created nested builder; once non-null it owns the field (see
// getInputFieldBuilder()).
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
    onnx.OnnxMl.ValueInfoProto, onnx.OnnxMl.ValueInfoProto.Builder, onnx.OnnxMl.ValueInfoProtoOrBuilder> inputBuilder_;
/**
 * <pre>
 * The inputs and outputs of the graph.
 * </pre>
 *
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
public java.util.List<onnx.OnnxMl.ValueInfoProto> getInputList() {
  if (inputBuilder_ == null) {
    return java.util.Collections.unmodifiableList(input_);
  } else {
    return inputBuilder_.getMessageList();
  }
}
/**
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
public int getInputCount() {
  if (inputBuilder_ == null) {
    return input_.size();
  } else {
    return inputBuilder_.getCount();
  }
}
/**
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
public onnx.OnnxMl.ValueInfoProto getInput(int index) {
  if (inputBuilder_ == null) {
    return input_.get(index);
  } else {
    return inputBuilder_.getMessage(index);
  }
}
/**
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
public Builder setInput(
    int index, onnx.OnnxMl.ValueInfoProto value) {
  if (inputBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureInputIsMutable();
    input_.set(index, value);
    onChanged();
  } else {
    inputBuilder_.setMessage(index, value);
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
public Builder setInput(
    int index, onnx.OnnxMl.ValueInfoProto.Builder builderForValue) {
  if (inputBuilder_ == null) {
    ensureInputIsMutable();
    input_.set(index, builderForValue.build());
    onChanged();
  } else {
    inputBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
public Builder addInput(onnx.OnnxMl.ValueInfoProto value) {
  if (inputBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureInputIsMutable();
    input_.add(value);
    onChanged();
  } else {
    inputBuilder_.addMessage(value);
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
public Builder addInput(
    int index, onnx.OnnxMl.ValueInfoProto value) {
  if (inputBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureInputIsMutable();
    input_.add(index, value);
    onChanged();
  } else {
    inputBuilder_.addMessage(index, value);
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
public Builder addInput(
    onnx.OnnxMl.ValueInfoProto.Builder builderForValue) {
  if (inputBuilder_ == null) {
    ensureInputIsMutable();
    input_.add(builderForValue.build());
    onChanged();
  } else {
    inputBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
public Builder addInput(
    int index, onnx.OnnxMl.ValueInfoProto.Builder builderForValue) {
  if (inputBuilder_ == null) {
    ensureInputIsMutable();
    input_.add(index, builderForValue.build());
    onChanged();
  } else {
    inputBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
public Builder addAllInput(
    java.lang.Iterable<? extends onnx.OnnxMl.ValueInfoProto> values) {
  if (inputBuilder_ == null) {
    ensureInputIsMutable();
    org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
        values, input_);
    onChanged();
  } else {
    inputBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
public Builder clearInput() {
  if (inputBuilder_ == null) {
    input_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000008);
    onChanged();
  } else {
    inputBuilder_.clear();
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
public Builder removeInput(int index) {
  if (inputBuilder_ == null) {
    ensureInputIsMutable();
    input_.remove(index);
    onChanged();
  } else {
    inputBuilder_.remove(index);
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
public onnx.OnnxMl.ValueInfoProto.Builder getInputBuilder(
    int index) {
  return getInputFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
public onnx.OnnxMl.ValueInfoProtoOrBuilder getInputOrBuilder(
    int index) {
  if (inputBuilder_ == null) {
    return input_.get(index); } else {
    return inputBuilder_.getMessageOrBuilder(index);
  }
}
/**
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
public java.util.List<? extends onnx.OnnxMl.ValueInfoProtoOrBuilder>
    getInputOrBuilderList() {
  if (inputBuilder_ != null) {
    return inputBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(input_);
  }
}
/**
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
public onnx.OnnxMl.ValueInfoProto.Builder addInputBuilder() {
  return getInputFieldBuilder().addBuilder(
      onnx.OnnxMl.ValueInfoProto.getDefaultInstance());
}
/**
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
public onnx.OnnxMl.ValueInfoProto.Builder addInputBuilder(
    int index) {
  return getInputFieldBuilder().addBuilder(
      index, onnx.OnnxMl.ValueInfoProto.getDefaultInstance());
}
/**
 * <code>repeated .onnx.ValueInfoProto input = 11;</code>
 */
public java.util.List<onnx.OnnxMl.ValueInfoProto.Builder>
    getInputBuilderList() {
  return getInputFieldBuilder().getBuilderList();
}
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
    onnx.OnnxMl.ValueInfoProto, onnx.OnnxMl.ValueInfoProto.Builder, onnx.OnnxMl.ValueInfoProtoOrBuilder>
    getInputFieldBuilder() {
  if (inputBuilder_ == null) {
    // From this point on the field builder owns the list; the plain list
    // reference is released so the two representations can never diverge.
    inputBuilder_ = new org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
        onnx.OnnxMl.ValueInfoProto, onnx.OnnxMl.ValueInfoProto.Builder, onnx.OnnxMl.ValueInfoProtoOrBuilder>(
            input_,
            ((bitField0_ & 0x00000008) != 0),
            getParentForChildren(),
            isClean());
    input_ = null;
  }
  return inputBuilder_;
}
// ---- repeated .onnx.ValueInfoProto output = 12 ----
// Standard protoc repeated-message-field accessor family. Generic type
// parameters restored (they were stripped to raw types in this copy).
private java.util.List<onnx.OnnxMl.ValueInfoProto> output_ =
  java.util.Collections.emptyList();
private void ensureOutputIsMutable() {
  if (!((bitField0_ & 0x00000010) != 0)) {
    // Copy-on-write: swap in a private mutable copy before the first mutation.
    output_ = new java.util.ArrayList<onnx.OnnxMl.ValueInfoProto>(output_);
    bitField0_ |= 0x00000010;
  }
}
// Lazily-created nested builder; once non-null it owns the field (see
// getOutputFieldBuilder()).
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
    onnx.OnnxMl.ValueInfoProto, onnx.OnnxMl.ValueInfoProto.Builder, onnx.OnnxMl.ValueInfoProtoOrBuilder> outputBuilder_;
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
public java.util.List<onnx.OnnxMl.ValueInfoProto> getOutputList() {
  if (outputBuilder_ == null) {
    return java.util.Collections.unmodifiableList(output_);
  } else {
    return outputBuilder_.getMessageList();
  }
}
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
public int getOutputCount() {
  if (outputBuilder_ == null) {
    return output_.size();
  } else {
    return outputBuilder_.getCount();
  }
}
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
public onnx.OnnxMl.ValueInfoProto getOutput(int index) {
  if (outputBuilder_ == null) {
    return output_.get(index);
  } else {
    return outputBuilder_.getMessage(index);
  }
}
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
public Builder setOutput(
    int index, onnx.OnnxMl.ValueInfoProto value) {
  if (outputBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureOutputIsMutable();
    output_.set(index, value);
    onChanged();
  } else {
    outputBuilder_.setMessage(index, value);
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
public Builder setOutput(
    int index, onnx.OnnxMl.ValueInfoProto.Builder builderForValue) {
  if (outputBuilder_ == null) {
    ensureOutputIsMutable();
    output_.set(index, builderForValue.build());
    onChanged();
  } else {
    outputBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
public Builder addOutput(onnx.OnnxMl.ValueInfoProto value) {
  if (outputBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureOutputIsMutable();
    output_.add(value);
    onChanged();
  } else {
    outputBuilder_.addMessage(value);
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
public Builder addOutput(
    int index, onnx.OnnxMl.ValueInfoProto value) {
  if (outputBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureOutputIsMutable();
    output_.add(index, value);
    onChanged();
  } else {
    outputBuilder_.addMessage(index, value);
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
public Builder addOutput(
    onnx.OnnxMl.ValueInfoProto.Builder builderForValue) {
  if (outputBuilder_ == null) {
    ensureOutputIsMutable();
    output_.add(builderForValue.build());
    onChanged();
  } else {
    outputBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
public Builder addOutput(
    int index, onnx.OnnxMl.ValueInfoProto.Builder builderForValue) {
  if (outputBuilder_ == null) {
    ensureOutputIsMutable();
    output_.add(index, builderForValue.build());
    onChanged();
  } else {
    outputBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
public Builder addAllOutput(
    java.lang.Iterable<? extends onnx.OnnxMl.ValueInfoProto> values) {
  if (outputBuilder_ == null) {
    ensureOutputIsMutable();
    org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
        values, output_);
    onChanged();
  } else {
    outputBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
public Builder clearOutput() {
  if (outputBuilder_ == null) {
    output_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000010);
    onChanged();
  } else {
    outputBuilder_.clear();
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
public Builder removeOutput(int index) {
  if (outputBuilder_ == null) {
    ensureOutputIsMutable();
    output_.remove(index);
    onChanged();
  } else {
    outputBuilder_.remove(index);
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
public onnx.OnnxMl.ValueInfoProto.Builder getOutputBuilder(
    int index) {
  return getOutputFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
public onnx.OnnxMl.ValueInfoProtoOrBuilder getOutputOrBuilder(
    int index) {
  if (outputBuilder_ == null) {
    return output_.get(index); } else {
    return outputBuilder_.getMessageOrBuilder(index);
  }
}
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
public java.util.List<? extends onnx.OnnxMl.ValueInfoProtoOrBuilder>
    getOutputOrBuilderList() {
  if (outputBuilder_ != null) {
    return outputBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(output_);
  }
}
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
public onnx.OnnxMl.ValueInfoProto.Builder addOutputBuilder() {
  return getOutputFieldBuilder().addBuilder(
      onnx.OnnxMl.ValueInfoProto.getDefaultInstance());
}
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
public onnx.OnnxMl.ValueInfoProto.Builder addOutputBuilder(
    int index) {
  return getOutputFieldBuilder().addBuilder(
      index, onnx.OnnxMl.ValueInfoProto.getDefaultInstance());
}
/**
 * <code>repeated .onnx.ValueInfoProto output = 12;</code>
 */
public java.util.List<onnx.OnnxMl.ValueInfoProto.Builder>
    getOutputBuilderList() {
  return getOutputFieldBuilder().getBuilderList();
}
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
    onnx.OnnxMl.ValueInfoProto, onnx.OnnxMl.ValueInfoProto.Builder, onnx.OnnxMl.ValueInfoProtoOrBuilder>
    getOutputFieldBuilder() {
  if (outputBuilder_ == null) {
    // From this point on the field builder owns the list; the plain list
    // reference is released so the two representations can never diverge.
    outputBuilder_ = new org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
        onnx.OnnxMl.ValueInfoProto, onnx.OnnxMl.ValueInfoProto.Builder, onnx.OnnxMl.ValueInfoProtoOrBuilder>(
            output_,
            ((bitField0_ & 0x00000010) != 0),
            getParentForChildren(),
            isClean());
    output_ = null;
  }
  return outputBuilder_;
}
// ---- repeated .onnx.ValueInfoProto value_info = 13 ----
// Standard protoc repeated-message-field accessor family. Generic type
// parameters restored (they were stripped to raw types in this copy).
private java.util.List<onnx.OnnxMl.ValueInfoProto> valueInfo_ =
  java.util.Collections.emptyList();
private void ensureValueInfoIsMutable() {
  if (!((bitField0_ & 0x00000020) != 0)) {
    // Copy-on-write: swap in a private mutable copy before the first mutation.
    valueInfo_ = new java.util.ArrayList<onnx.OnnxMl.ValueInfoProto>(valueInfo_);
    bitField0_ |= 0x00000020;
  }
}
// Lazily-created nested builder; once non-null it owns the field (see
// getValueInfoFieldBuilder()).
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
    onnx.OnnxMl.ValueInfoProto, onnx.OnnxMl.ValueInfoProto.Builder, onnx.OnnxMl.ValueInfoProtoOrBuilder> valueInfoBuilder_;
/**
 * <pre>
 * Information for the values in the graph. The ValueInfoProto.name's
 * must be distinct. It is optional for a value to appear in value_info list.
 * </pre>
 *
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
public java.util.List<onnx.OnnxMl.ValueInfoProto> getValueInfoList() {
  if (valueInfoBuilder_ == null) {
    return java.util.Collections.unmodifiableList(valueInfo_);
  } else {
    return valueInfoBuilder_.getMessageList();
  }
}
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
public int getValueInfoCount() {
  if (valueInfoBuilder_ == null) {
    return valueInfo_.size();
  } else {
    return valueInfoBuilder_.getCount();
  }
}
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
public onnx.OnnxMl.ValueInfoProto getValueInfo(int index) {
  if (valueInfoBuilder_ == null) {
    return valueInfo_.get(index);
  } else {
    return valueInfoBuilder_.getMessage(index);
  }
}
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
public Builder setValueInfo(
    int index, onnx.OnnxMl.ValueInfoProto value) {
  if (valueInfoBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureValueInfoIsMutable();
    valueInfo_.set(index, value);
    onChanged();
  } else {
    valueInfoBuilder_.setMessage(index, value);
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
public Builder setValueInfo(
    int index, onnx.OnnxMl.ValueInfoProto.Builder builderForValue) {
  if (valueInfoBuilder_ == null) {
    ensureValueInfoIsMutable();
    valueInfo_.set(index, builderForValue.build());
    onChanged();
  } else {
    valueInfoBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
public Builder addValueInfo(onnx.OnnxMl.ValueInfoProto value) {
  if (valueInfoBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureValueInfoIsMutable();
    valueInfo_.add(value);
    onChanged();
  } else {
    valueInfoBuilder_.addMessage(value);
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
public Builder addValueInfo(
    int index, onnx.OnnxMl.ValueInfoProto value) {
  if (valueInfoBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureValueInfoIsMutable();
    valueInfo_.add(index, value);
    onChanged();
  } else {
    valueInfoBuilder_.addMessage(index, value);
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
public Builder addValueInfo(
    onnx.OnnxMl.ValueInfoProto.Builder builderForValue) {
  if (valueInfoBuilder_ == null) {
    ensureValueInfoIsMutable();
    valueInfo_.add(builderForValue.build());
    onChanged();
  } else {
    valueInfoBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
public Builder addValueInfo(
    int index, onnx.OnnxMl.ValueInfoProto.Builder builderForValue) {
  if (valueInfoBuilder_ == null) {
    ensureValueInfoIsMutable();
    valueInfo_.add(index, builderForValue.build());
    onChanged();
  } else {
    valueInfoBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
public Builder addAllValueInfo(
    java.lang.Iterable<? extends onnx.OnnxMl.ValueInfoProto> values) {
  if (valueInfoBuilder_ == null) {
    ensureValueInfoIsMutable();
    org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
        values, valueInfo_);
    onChanged();
  } else {
    valueInfoBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
public Builder clearValueInfo() {
  if (valueInfoBuilder_ == null) {
    valueInfo_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000020);
    onChanged();
  } else {
    valueInfoBuilder_.clear();
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
public Builder removeValueInfo(int index) {
  if (valueInfoBuilder_ == null) {
    ensureValueInfoIsMutable();
    valueInfo_.remove(index);
    onChanged();
  } else {
    valueInfoBuilder_.remove(index);
  }
  return this;
}
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
public onnx.OnnxMl.ValueInfoProto.Builder getValueInfoBuilder(
    int index) {
  return getValueInfoFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
public onnx.OnnxMl.ValueInfoProtoOrBuilder getValueInfoOrBuilder(
    int index) {
  if (valueInfoBuilder_ == null) {
    return valueInfo_.get(index); } else {
    return valueInfoBuilder_.getMessageOrBuilder(index);
  }
}
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
public java.util.List<? extends onnx.OnnxMl.ValueInfoProtoOrBuilder>
    getValueInfoOrBuilderList() {
  if (valueInfoBuilder_ != null) {
    return valueInfoBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(valueInfo_);
  }
}
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
public onnx.OnnxMl.ValueInfoProto.Builder addValueInfoBuilder() {
  return getValueInfoFieldBuilder().addBuilder(
      onnx.OnnxMl.ValueInfoProto.getDefaultInstance());
}
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
public onnx.OnnxMl.ValueInfoProto.Builder addValueInfoBuilder(
    int index) {
  return getValueInfoFieldBuilder().addBuilder(
      index, onnx.OnnxMl.ValueInfoProto.getDefaultInstance());
}
/**
 * <code>repeated .onnx.ValueInfoProto value_info = 13;</code>
 */
public java.util.List<onnx.OnnxMl.ValueInfoProto.Builder>
    getValueInfoBuilderList() {
  return getValueInfoFieldBuilder().getBuilderList();
}
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
    onnx.OnnxMl.ValueInfoProto, onnx.OnnxMl.ValueInfoProto.Builder, onnx.OnnxMl.ValueInfoProtoOrBuilder>
    getValueInfoFieldBuilder() {
  if (valueInfoBuilder_ == null) {
    // From this point on the field builder owns the list; the plain list
    // reference is released so the two representations can never diverge.
    valueInfoBuilder_ = new org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
        onnx.OnnxMl.ValueInfoProto, onnx.OnnxMl.ValueInfoProto.Builder, onnx.OnnxMl.ValueInfoProtoOrBuilder>(
            valueInfo_,
            ((bitField0_ & 0x00000020) != 0),
            getParentForChildren(),
            isClean());
    valueInfo_ = null;
  }
  return valueInfoBuilder_;
}
private java.util.List quantizationAnnotation_ =
java.util.Collections.emptyList();
private void ensureQuantizationAnnotationIsMutable() {
if (!((bitField0_ & 0x00000040) != 0)) {
quantizationAnnotation_ = new java.util.ArrayList(quantizationAnnotation_);
bitField0_ |= 0x00000040;
}
}
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
onnx.OnnxMl.TensorAnnotation, onnx.OnnxMl.TensorAnnotation.Builder, onnx.OnnxMl.TensorAnnotationOrBuilder> quantizationAnnotationBuilder_;
/**
*
* This field carries information to indicate the mapping among a tensor and its
* quantization parameter tensors. For example:
* For tensor 'a', it may have {'SCALE_TENSOR', 'a_scale'} and {'ZERO_POINT_TENSOR', 'a_zero_point'} annotated,
* which means, tensor 'a_scale' and tensor 'a_zero_point' are scale and zero point of tensor 'a' in the model.
*
*
* repeated .onnx.TensorAnnotation quantization_annotation = 14;
*/
public java.util.List getQuantizationAnnotationList() {
if (quantizationAnnotationBuilder_ == null) {
return java.util.Collections.unmodifiableList(quantizationAnnotation_);
} else {
return quantizationAnnotationBuilder_.getMessageList();
}
}
/**
*
* This field carries information to indicate the mapping among a tensor and its
* quantization parameter tensors. For example:
* For tensor 'a', it may have {'SCALE_TENSOR', 'a_scale'} and {'ZERO_POINT_TENSOR', 'a_zero_point'} annotated,
* which means, tensor 'a_scale' and tensor 'a_zero_point' are scale and zero point of tensor 'a' in the model.
*
*
* repeated .onnx.TensorAnnotation quantization_annotation = 14;
*/
public int getQuantizationAnnotationCount() {
if (quantizationAnnotationBuilder_ == null) {
return quantizationAnnotation_.size();
} else {
return quantizationAnnotationBuilder_.getCount();
}
}
/**
*
* This field carries information to indicate the mapping among a tensor and its
* quantization parameter tensors. For example:
* For tensor 'a', it may have {'SCALE_TENSOR', 'a_scale'} and {'ZERO_POINT_TENSOR', 'a_zero_point'} annotated,
* which means, tensor 'a_scale' and tensor 'a_zero_point' are scale and zero point of tensor 'a' in the model.
*
*
* repeated .onnx.TensorAnnotation quantization_annotation = 14;
*/
public onnx.OnnxMl.TensorAnnotation getQuantizationAnnotation(int index) {
if (quantizationAnnotationBuilder_ == null) {
return quantizationAnnotation_.get(index);
} else {
return quantizationAnnotationBuilder_.getMessage(index);
}
}
/**
*
* This field carries information to indicate the mapping among a tensor and its
* quantization parameter tensors. For example:
* For tensor 'a', it may have {'SCALE_TENSOR', 'a_scale'} and {'ZERO_POINT_TENSOR', 'a_zero_point'} annotated,
* which means, tensor 'a_scale' and tensor 'a_zero_point' are scale and zero point of tensor 'a' in the model.
*
*
* repeated .onnx.TensorAnnotation quantization_annotation = 14;
*/
public Builder setQuantizationAnnotation(
int index, onnx.OnnxMl.TensorAnnotation value) {
if (quantizationAnnotationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureQuantizationAnnotationIsMutable();
quantizationAnnotation_.set(index, value);
onChanged();
} else {
quantizationAnnotationBuilder_.setMessage(index, value);
}
return this;
}
/**
 * Replaces the quantization annotation at {@code index} with the message built
 * from {@code builderForValue}.
 *
 * <p>Each annotation maps a tensor to its quantization parameter tensors
 * (scale / zero point) in this model.
 *
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 */
public Builder setQuantizationAnnotation(
    int index, onnx.OnnxMl.TensorAnnotation.Builder builderForValue) {
  if (quantizationAnnotationBuilder_ != null) {
    quantizationAnnotationBuilder_.setMessage(index, builderForValue.build());
    return this;
  }
  ensureQuantizationAnnotationIsMutable();
  quantizationAnnotation_.set(index, builderForValue.build());
  onChanged();
  return this;
}
/**
 * Appends {@code value} to the quantization annotations.
 *
 * <p>Each annotation maps a tensor to its quantization parameter tensors
 * (scale / zero point) in this model.
 *
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 *
 * @throws NullPointerException if {@code value} is null
 */
public Builder addQuantizationAnnotation(onnx.OnnxMl.TensorAnnotation value) {
  if (quantizationAnnotationBuilder_ != null) {
    quantizationAnnotationBuilder_.addMessage(value);
    return this;
  }
  if (value == null) {
    throw new NullPointerException();
  }
  ensureQuantizationAnnotationIsMutable();
  quantizationAnnotation_.add(value);
  onChanged();
  return this;
}
/**
 * Inserts {@code value} into the quantization annotations at {@code index}.
 *
 * <p>Each annotation maps a tensor to its quantization parameter tensors
 * (scale / zero point) in this model.
 *
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 *
 * @throws NullPointerException if {@code value} is null
 */
public Builder addQuantizationAnnotation(
    int index, onnx.OnnxMl.TensorAnnotation value) {
  if (quantizationAnnotationBuilder_ != null) {
    quantizationAnnotationBuilder_.addMessage(index, value);
    return this;
  }
  if (value == null) {
    throw new NullPointerException();
  }
  ensureQuantizationAnnotationIsMutable();
  quantizationAnnotation_.add(index, value);
  onChanged();
  return this;
}
/**
 * Appends the message built from {@code builderForValue} to the quantization
 * annotations.
 *
 * <p>Each annotation maps a tensor to its quantization parameter tensors
 * (scale / zero point) in this model.
 *
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 */
public Builder addQuantizationAnnotation(
    onnx.OnnxMl.TensorAnnotation.Builder builderForValue) {
  if (quantizationAnnotationBuilder_ != null) {
    quantizationAnnotationBuilder_.addMessage(builderForValue.build());
    return this;
  }
  ensureQuantizationAnnotationIsMutable();
  quantizationAnnotation_.add(builderForValue.build());
  onChanged();
  return this;
}
/**
 * Inserts the message built from {@code builderForValue} into the quantization
 * annotations at {@code index}.
 *
 * <p>Each annotation maps a tensor to its quantization parameter tensors
 * (scale / zero point) in this model.
 *
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 */
public Builder addQuantizationAnnotation(
    int index, onnx.OnnxMl.TensorAnnotation.Builder builderForValue) {
  if (quantizationAnnotationBuilder_ != null) {
    quantizationAnnotationBuilder_.addMessage(index, builderForValue.build());
    return this;
  }
  ensureQuantizationAnnotationIsMutable();
  quantizationAnnotation_.add(index, builderForValue.build());
  onChanged();
  return this;
}
/**
 * Appends every element of {@code values} to the quantization annotations.
 *
 * <p>Each annotation maps a tensor to its quantization parameter tensors
 * (scale / zero point) in this model.
 *
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 *
 * @param values messages to append; must not contain null elements
 */
public Builder addAllQuantizationAnnotation(
    // NOTE: the generic bound was lost when this file was extracted from
    // HTML; restored to the standard protobuf-codegen signature.
    java.lang.Iterable<? extends onnx.OnnxMl.TensorAnnotation> values) {
  if (quantizationAnnotationBuilder_ == null) {
    ensureQuantizationAnnotationIsMutable();
    org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
        values, quantizationAnnotation_);
    onChanged();
  } else {
    quantizationAnnotationBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 * Removes every quantization annotation from this builder.
 *
 * <p>Each annotation maps a tensor to its quantization parameter tensors
 * (scale / zero point) in this model.
 *
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 */
public Builder clearQuantizationAnnotation() {
  if (quantizationAnnotationBuilder_ != null) {
    quantizationAnnotationBuilder_.clear();
    return this;
  }
  // No field builder yet: drop the backing list and clear the has-bit.
  quantizationAnnotation_ = java.util.Collections.emptyList();
  bitField0_ = (bitField0_ & ~0x00000040);
  onChanged();
  return this;
}
/**
 * Removes the quantization annotation at {@code index}.
 *
 * <p>Each annotation maps a tensor to its quantization parameter tensors
 * (scale / zero point) in this model.
 *
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 */
public Builder removeQuantizationAnnotation(int index) {
  if (quantizationAnnotationBuilder_ != null) {
    quantizationAnnotationBuilder_.remove(index);
    return this;
  }
  ensureQuantizationAnnotationIsMutable();
  quantizationAnnotation_.remove(index);
  onChanged();
  return this;
}
/**
 * Returns a builder for the quantization annotation at {@code index},
 * forcing creation of the repeated-field builder if necessary.
 *
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 */
public onnx.OnnxMl.TensorAnnotation.Builder getQuantizationAnnotationBuilder(
    int index) {
  return getQuantizationAnnotationFieldBuilder()
      .getBuilder(index);
}
/**
 * Returns a read view (message or builder) of the quantization annotation at
 * {@code index} without forcing builder creation.
 *
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 */
public onnx.OnnxMl.TensorAnnotationOrBuilder getQuantizationAnnotationOrBuilder(
    int index) {
  if (quantizationAnnotationBuilder_ != null) {
    return quantizationAnnotationBuilder_.getMessageOrBuilder(index);
  }
  return quantizationAnnotation_.get(index);
}
/**
 * Returns an unmodifiable read view of all quantization annotations
 * (messages or builders) without forcing builder creation.
 *
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 */
// NOTE: the List type argument was lost when this file was extracted from
// HTML; restored to the standard protobuf-codegen signature.
public java.util.List<? extends onnx.OnnxMl.TensorAnnotationOrBuilder>
     getQuantizationAnnotationOrBuilderList() {
  if (quantizationAnnotationBuilder_ != null) {
    return quantizationAnnotationBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(quantizationAnnotation_);
  }
}
/**
 * Appends a default-valued annotation and returns its builder for in-place
 * editing.
 *
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 */
public onnx.OnnxMl.TensorAnnotation.Builder addQuantizationAnnotationBuilder() {
  onnx.OnnxMl.TensorAnnotation defaultValue =
      onnx.OnnxMl.TensorAnnotation.getDefaultInstance();
  return getQuantizationAnnotationFieldBuilder().addBuilder(defaultValue);
}
/**
 * Inserts a default-valued annotation at {@code index} and returns its builder
 * for in-place editing.
 *
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 */
public onnx.OnnxMl.TensorAnnotation.Builder addQuantizationAnnotationBuilder(
    int index) {
  onnx.OnnxMl.TensorAnnotation defaultValue =
      onnx.OnnxMl.TensorAnnotation.getDefaultInstance();
  return getQuantizationAnnotationFieldBuilder().addBuilder(index, defaultValue);
}
/**
 * Returns the list of builders for all quantization annotations, forcing
 * creation of the repeated-field builder if necessary.
 *
 * <code>repeated .onnx.TensorAnnotation quantization_annotation = 14;</code>
 */
// NOTE: the List type argument was lost when this file was extracted from
// HTML; restored to the standard protobuf-codegen signature.
public java.util.List<onnx.OnnxMl.TensorAnnotation.Builder>
     getQuantizationAnnotationBuilderList() {
  return getQuantizationAnnotationFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 for quantization_annotation.
// On first call the current backing list and its has-bit
// (bitField0_ & 0x00000040) are handed to the builder, and the plain list
// reference is nulled out so the builder becomes the single source of truth.
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
onnx.OnnxMl.TensorAnnotation, onnx.OnnxMl.TensorAnnotation.Builder, onnx.OnnxMl.TensorAnnotationOrBuilder>
getQuantizationAnnotationFieldBuilder() {
if (quantizationAnnotationBuilder_ == null) {
quantizationAnnotationBuilder_ = new org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
onnx.OnnxMl.TensorAnnotation, onnx.OnnxMl.TensorAnnotation.Builder, onnx.OnnxMl.TensorAnnotationOrBuilder>(
quantizationAnnotation_,
((bitField0_ & 0x00000040) != 0),
getParentForChildren(),
isClean());
// The builder now owns the data; drop the direct list reference.
quantizationAnnotation_ = null;
}
return quantizationAnnotationBuilder_;
}
// Replaces this builder's unknown-field set; delegates to the generated base class.
@java.lang.Override
public final Builder setUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
// Merges the given unknown fields into this builder's existing set.
@java.lang.Override
public final Builder mergeUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:onnx.GraphProto)
}
// @@protoc_insertion_point(class_scope:onnx.GraphProto)
// Singleton default instance of GraphProto (all fields at their defaults).
private static final onnx.OnnxMl.GraphProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new onnx.OnnxMl.GraphProto();
}
// Returns the shared immutable default instance.
public static onnx.OnnxMl.GraphProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser for GraphProto.
// NOTE: the <GraphProto> type arguments on Parser/AbstractParser were lost
// when this file was extracted from HTML; restored to the standard
// protobuf-codegen signatures (erasure-identical, so binary compatible).
private static final org.nd4j.shade.protobuf.Parser<GraphProto>
    PARSER = new org.nd4j.shade.protobuf.AbstractParser<GraphProto>() {
  @java.lang.Override
  public GraphProto parsePartialFrom(
      org.nd4j.shade.protobuf.CodedInputStream input,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    return new GraphProto(input, extensionRegistry);
  }
};

/** Returns the shared parser for GraphProto messages. */
public static org.nd4j.shade.protobuf.Parser<GraphProto> parser() {
  return PARSER;
}

@java.lang.Override
public org.nd4j.shade.protobuf.Parser<GraphProto> getParserForType() {
  return PARSER;
}

@java.lang.Override
public onnx.OnnxMl.GraphProto getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// NOTE: every List type argument in this interface was lost when the file was
// extracted from HTML; restored to the standard protobuf-codegen signatures
// (erasure-identical, so binary compatible with existing callers).
public interface TensorProtoOrBuilder extends
    // @@protoc_insertion_point(interface_extends:onnx.TensorProto)
    org.nd4j.shade.protobuf.MessageOrBuilder {

  /**
   * The shape of the tensor.
   *
   * <code>repeated int64 dims = 1;</code>
   * @return A list containing the dims.
   */
  java.util.List<java.lang.Long> getDimsList();

  /**
   * The shape of the tensor.
   *
   * <code>repeated int64 dims = 1;</code>
   * @return The count of dims.
   */
  int getDimsCount();

  /**
   * The shape of the tensor.
   *
   * <code>repeated int64 dims = 1;</code>
   * @param index The index of the element to return.
   * @return The dims at the given index.
   */
  long getDims(int index);

  /**
   * The data type of the tensor.
   * This field MUST have a valid TensorProto.DataType value.
   *
   * <code>int32 data_type = 2;</code>
   * @return The dataType.
   */
  int getDataType();

  /**
   * <code>.onnx.TensorProto.Segment segment = 3;</code>
   * @return Whether the segment field is set.
   */
  boolean hasSegment();

  /**
   * <code>.onnx.TensorProto.Segment segment = 3;</code>
   * @return The segment.
   */
  onnx.OnnxMl.TensorProto.Segment getSegment();

  /**
   * <code>.onnx.TensorProto.Segment segment = 3;</code>
   */
  onnx.OnnxMl.TensorProto.SegmentOrBuilder getSegmentOrBuilder();

  /**
   * For float and complex64 values.
   * Complex64 tensors are encoded as a single array of floats with real
   * components in odd-numbered positions and the matching imaginary component
   * in the following even-numbered position, e.g. [1.0 + 2.0i, 3.0 + 4.0i]
   * is encoded as [1.0, 2.0, 3.0, 4.0].
   * When present, data_type MUST be FLOAT or COMPLEX64.
   *
   * <code>repeated float float_data = 4 [packed = true];</code>
   * @return A list containing the floatData.
   */
  java.util.List<java.lang.Float> getFloatDataList();

  /**
   * For float and complex64 values (see {@link #getFloatDataList()}).
   *
   * <code>repeated float float_data = 4 [packed = true];</code>
   * @return The count of floatData.
   */
  int getFloatDataCount();

  /**
   * For float and complex64 values (see {@link #getFloatDataList()}).
   *
   * <code>repeated float float_data = 4 [packed = true];</code>
   * @param index The index of the element to return.
   * @return The floatData at the given index.
   */
  float getFloatData(int index);

  /**
   * For int32, uint8, int8, uint16, int16, bool, and float16 values.
   * float16 values must be bit-wise converted to a uint16_t prior to writing.
   * When present, data_type MUST be INT32, INT16, INT8, UINT16, UINT8, BOOL,
   * or FLOAT16.
   *
   * <code>repeated int32 int32_data = 5 [packed = true];</code>
   * @return A list containing the int32Data.
   */
  java.util.List<java.lang.Integer> getInt32DataList();

  /**
   * For int32-family values (see {@link #getInt32DataList()}).
   *
   * <code>repeated int32 int32_data = 5 [packed = true];</code>
   * @return The count of int32Data.
   */
  int getInt32DataCount();

  /**
   * For int32-family values (see {@link #getInt32DataList()}).
   *
   * <code>repeated int32 int32_data = 5 [packed = true];</code>
   * @param index The index of the element to return.
   * @return The int32Data at the given index.
   */
  int getInt32Data(int index);

  /**
   * For strings. Each element is a UTF-8 encoded Unicode string, no trailing
   * null, no leading BOM. The protobuf "string" scalar type is not used, to
   * match ML community conventions.
   * When present, data_type MUST be STRING.
   *
   * <code>repeated bytes string_data = 6;</code>
   * @return A list containing the stringData.
   */
  java.util.List<org.nd4j.shade.protobuf.ByteString> getStringDataList();

  /**
   * For strings (see {@link #getStringDataList()}).
   *
   * <code>repeated bytes string_data = 6;</code>
   * @return The count of stringData.
   */
  int getStringDataCount();

  /**
   * For strings (see {@link #getStringDataList()}).
   *
   * <code>repeated bytes string_data = 6;</code>
   * @param index The index of the element to return.
   * @return The stringData at the given index.
   */
  org.nd4j.shade.protobuf.ByteString getStringData(int index);

  /**
   * For int64. When present, data_type MUST be INT64.
   *
   * <code>repeated int64 int64_data = 7 [packed = true];</code>
   * @return A list containing the int64Data.
   */
  java.util.List<java.lang.Long> getInt64DataList();

  /**
   * For int64. When present, data_type MUST be INT64.
   *
   * <code>repeated int64 int64_data = 7 [packed = true];</code>
   * @return The count of int64Data.
   */
  int getInt64DataCount();

  /**
   * For int64. When present, data_type MUST be INT64.
   *
   * <code>repeated int64 int64_data = 7 [packed = true];</code>
   * @param index The index of the element to return.
   * @return The int64Data at the given index.
   */
  long getInt64Data(int index);

  /**
   * Optionally, a name for the tensor.
   *
   * <code>string name = 8;</code>
   * @return The name.
   */
  java.lang.String getName();

  /**
   * Optionally, a name for the tensor.
   *
   * <code>string name = 8;</code>
   * @return The bytes for name.
   */
  org.nd4j.shade.protobuf.ByteString
      getNameBytes();

  /**
   * A human-readable documentation for this tensor. Markdown is allowed.
   *
   * <code>string doc_string = 12;</code>
   * @return The docString.
   */
  java.lang.String getDocString();

  /**
   * A human-readable documentation for this tensor. Markdown is allowed.
   *
   * <code>string doc_string = 12;</code>
   * @return The bytes for docString.
   */
  org.nd4j.shade.protobuf.ByteString
      getDocStringBytes();

  /**
   * Serializations can either use one of the typed fields above, or this raw
   * bytes field. The only exception is the string case, where content MUST be
   * stored in the repeated bytes string_data field.
   * When raw_data is used, elements MUST be stored fixed-width,
   * little-endian; floating-point types MUST be IEEE 754; Complex64/Complex128
   * elements are written as two consecutive FLOAT/DOUBLE values, real
   * component first; booleans are one byte per element (0x01 true, 0x00
   * false). Typed fields may pack better (varint encoding) and can yield a
   * smaller binary footprint. When present, data_type MUST NOT be STRING or
   * UNDEFINED.
   *
   * <code>bytes raw_data = 9;</code>
   * @return The rawData.
   */
  org.nd4j.shade.protobuf.ByteString getRawData();

  /**
   * Data can be stored inside the protobuf file (typed fields or raw_data) or
   * in an external file described by this field's key-value pairs.
   * Recognized keys:
   * - "location" (required): POSIX path relative to the model's directory.
   * - "offset" (optional): start byte of the data, integer as string; SHOULD
   *   be a multiple of 4096 (page size) to enable mmap.
   * - "length" (optional): number of data bytes, integer as string.
   * - "checksum" (optional): SHA1 digest of the file named by "location".
   *
   * <code>repeated .onnx.StringStringEntryProto external_data = 13;</code>
   */
  java.util.List<onnx.OnnxMl.StringStringEntryProto>
      getExternalDataList();

  /**
   * External data location entries (see {@link #getExternalDataList()}).
   *
   * <code>repeated .onnx.StringStringEntryProto external_data = 13;</code>
   */
  onnx.OnnxMl.StringStringEntryProto getExternalData(int index);

  /**
   * External data location entries (see {@link #getExternalDataList()}).
   *
   * <code>repeated .onnx.StringStringEntryProto external_data = 13;</code>
   */
  int getExternalDataCount();

  /**
   * External data location entries (see {@link #getExternalDataList()}).
   *
   * <code>repeated .onnx.StringStringEntryProto external_data = 13;</code>
   */
  java.util.List<? extends onnx.OnnxMl.StringStringEntryProtoOrBuilder>
      getExternalDataOrBuilderList();

  /**
   * External data location entries (see {@link #getExternalDataList()}).
   *
   * <code>repeated .onnx.StringStringEntryProto external_data = 13;</code>
   */
  onnx.OnnxMl.StringStringEntryProtoOrBuilder getExternalDataOrBuilder(
      int index);

  /**
   * If not set, data is stored in raw_data (if set) otherwise in the
   * type-specific field.
   *
   * <code>.onnx.TensorProto.DataLocation data_location = 14;</code>
   * @return The enum numeric value on the wire for dataLocation.
   */
  int getDataLocationValue();

  /**
   * If not set, data is stored in raw_data (if set) otherwise in the
   * type-specific field.
   *
   * <code>.onnx.TensorProto.DataLocation data_location = 14;</code>
   * @return The dataLocation.
   */
  onnx.OnnxMl.TensorProto.DataLocation getDataLocation();

  /**
   * For double values.
   * Complex128 tensors are encoded as a single array of doubles with real
   * components in odd-numbered positions and the matching imaginary component
   * in the following even-numbered position, e.g. [1.0 + 2.0i, 3.0 + 4.0i]
   * is encoded as [1.0, 2.0, 3.0, 4.0].
   * When present, data_type MUST be DOUBLE or COMPLEX128.
   *
   * <code>repeated double double_data = 10 [packed = true];</code>
   * @return A list containing the doubleData.
   */
  java.util.List<java.lang.Double> getDoubleDataList();

  /**
   * For double and complex128 values (see {@link #getDoubleDataList()}).
   *
   * <code>repeated double double_data = 10 [packed = true];</code>
   * @return The count of doubleData.
   */
  int getDoubleDataCount();

  /**
   * For double and complex128 values (see {@link #getDoubleDataList()}).
   *
   * <code>repeated double double_data = 10 [packed = true];</code>
   * @param index The index of the element to return.
   * @return The doubleData at the given index.
   */
  double getDoubleData(int index);

  /**
   * For uint64 and uint32 values.
   * When present, data_type MUST be UINT32 or UINT64.
   *
   * <code>repeated uint64 uint64_data = 11 [packed = true];</code>
   * @return A list containing the uint64Data.
   */
  java.util.List<java.lang.Long> getUint64DataList();

  /**
   * For uint64 and uint32 values (see {@link #getUint64DataList()}).
   *
   * <code>repeated uint64 uint64_data = 11 [packed = true];</code>
   * @return The count of uint64Data.
   */
  int getUint64DataCount();

  /**
   * For uint64 and uint32 values (see {@link #getUint64DataList()}).
   *
   * <code>repeated uint64 uint64_data = 11 [packed = true];</code>
   * @param index The index of the element to return.
   * @return The uint64Data at the given index.
   */
  long getUint64Data(int index);
}
/**
*
* Tensors
* A serialized tensor value.
*
*
* Protobuf type {@code onnx.TensorProto}
*/
public static final class TensorProto extends
org.nd4j.shade.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:onnx.TensorProto)
TensorProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use TensorProto.newBuilder() to construct.
// NOTE: the wildcard on GeneratedMessageV3.Builder was lost when this file was
// extracted from HTML; restored to the standard protobuf-codegen signature.
private TensorProto(org.nd4j.shade.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor used for the default instance and by parsing: every
// field starts at its proto3 default (empty lists, empty strings, zero enum).
private TensorProto() {
dims_ = emptyLongList();
floatData_ = emptyFloatList();
int32Data_ = emptyIntList();
stringData_ = java.util.Collections.emptyList();
int64Data_ = emptyLongList();
name_ = "";
docString_ = "";
rawData_ = org.nd4j.shade.protobuf.ByteString.EMPTY;
externalData_ = java.util.Collections.emptyList();
// 0 == first DataLocation enum value (the proto3 default).
dataLocation_ = 0;
doubleData_ = emptyDoubleList();
uint64Data_ = emptyLongList();
}
// Reflection hook used by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new TensorProto();
}
// Fields that arrived on the wire but are not part of this message's schema.
@java.lang.Override
public final org.nd4j.shade.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
/**
 * Parsing constructor: decodes a TensorProto from the wire format.
 *
 * <p>Repeated scalar fields each have two cases: the per-element varint/fixed
 * tag and the length-delimited packed tag, since proto3 parsers must accept
 * both encodings. Mutable lists are allocated lazily (tracked in
 * {@code mutable_bitField0_}) and made immutable in the {@code finally} block.
 *
 * @throws InvalidProtocolBufferException on malformed input
 */
private TensorProto(
    org.nd4j.shade.protobuf.CodedInputStream input,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  int mutable_bitField0_ = 0;
  org.nd4j.shade.protobuf.UnknownFieldSet.Builder unknownFields =
      org.nd4j.shade.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0: // end of stream
          done = true;
          break;
        case 8: { // dims = 1, unpacked varint
          if (!((mutable_bitField0_ & 0x00000001) != 0)) {
            dims_ = newLongList();
            mutable_bitField0_ |= 0x00000001;
          }
          dims_.addLong(input.readInt64());
          break;
        }
        case 10: { // dims = 1, packed
          int length = input.readRawVarint32();
          int limit = input.pushLimit(length);
          if (!((mutable_bitField0_ & 0x00000001) != 0) && input.getBytesUntilLimit() > 0) {
            dims_ = newLongList();
            mutable_bitField0_ |= 0x00000001;
          }
          while (input.getBytesUntilLimit() > 0) {
            dims_.addLong(input.readInt64());
          }
          input.popLimit(limit);
          break;
        }
        case 16: { // data_type = 2
          dataType_ = input.readInt32();
          break;
        }
        case 26: { // segment = 3; merge into any previously-read value
          onnx.OnnxMl.TensorProto.Segment.Builder subBuilder = null;
          if (segment_ != null) {
            subBuilder = segment_.toBuilder();
          }
          segment_ = input.readMessage(onnx.OnnxMl.TensorProto.Segment.parser(), extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(segment_);
            segment_ = subBuilder.buildPartial();
          }
          break;
        }
        case 37: { // float_data = 4, unpacked fixed32
          if (!((mutable_bitField0_ & 0x00000002) != 0)) {
            floatData_ = newFloatList();
            mutable_bitField0_ |= 0x00000002;
          }
          floatData_.addFloat(input.readFloat());
          break;
        }
        case 34: { // float_data = 4, packed
          int length = input.readRawVarint32();
          int limit = input.pushLimit(length);
          if (!((mutable_bitField0_ & 0x00000002) != 0) && input.getBytesUntilLimit() > 0) {
            floatData_ = newFloatList();
            mutable_bitField0_ |= 0x00000002;
          }
          while (input.getBytesUntilLimit() > 0) {
            floatData_.addFloat(input.readFloat());
          }
          input.popLimit(limit);
          break;
        }
        case 40: { // int32_data = 5, unpacked varint
          if (!((mutable_bitField0_ & 0x00000004) != 0)) {
            int32Data_ = newIntList();
            mutable_bitField0_ |= 0x00000004;
          }
          int32Data_.addInt(input.readInt32());
          break;
        }
        case 42: { // int32_data = 5, packed
          int length = input.readRawVarint32();
          int limit = input.pushLimit(length);
          if (!((mutable_bitField0_ & 0x00000004) != 0) && input.getBytesUntilLimit() > 0) {
            int32Data_ = newIntList();
            mutable_bitField0_ |= 0x00000004;
          }
          while (input.getBytesUntilLimit() > 0) {
            int32Data_.addInt(input.readInt32());
          }
          input.popLimit(limit);
          break;
        }
        case 50: { // string_data = 6 (bytes; never packed)
          if (!((mutable_bitField0_ & 0x00000008) != 0)) {
            // NOTE: element type restored — it was stripped during HTML extraction.
            stringData_ = new java.util.ArrayList<org.nd4j.shade.protobuf.ByteString>();
            mutable_bitField0_ |= 0x00000008;
          }
          stringData_.add(input.readBytes());
          break;
        }
        case 56: { // int64_data = 7, unpacked varint
          if (!((mutable_bitField0_ & 0x00000010) != 0)) {
            int64Data_ = newLongList();
            mutable_bitField0_ |= 0x00000010;
          }
          int64Data_.addLong(input.readInt64());
          break;
        }
        case 58: { // int64_data = 7, packed
          int length = input.readRawVarint32();
          int limit = input.pushLimit(length);
          if (!((mutable_bitField0_ & 0x00000010) != 0) && input.getBytesUntilLimit() > 0) {
            int64Data_ = newLongList();
            mutable_bitField0_ |= 0x00000010;
          }
          while (input.getBytesUntilLimit() > 0) {
            int64Data_.addLong(input.readInt64());
          }
          input.popLimit(limit);
          break;
        }
        case 66: { // name = 8
          java.lang.String s = input.readStringRequireUtf8();
          name_ = s;
          break;
        }
        case 74: { // raw_data = 9
          rawData_ = input.readBytes();
          break;
        }
        case 81: { // double_data = 10, unpacked fixed64
          if (!((mutable_bitField0_ & 0x00000040) != 0)) {
            doubleData_ = newDoubleList();
            mutable_bitField0_ |= 0x00000040;
          }
          doubleData_.addDouble(input.readDouble());
          break;
        }
        case 82: { // double_data = 10, packed
          int length = input.readRawVarint32();
          int limit = input.pushLimit(length);
          if (!((mutable_bitField0_ & 0x00000040) != 0) && input.getBytesUntilLimit() > 0) {
            doubleData_ = newDoubleList();
            mutable_bitField0_ |= 0x00000040;
          }
          while (input.getBytesUntilLimit() > 0) {
            doubleData_.addDouble(input.readDouble());
          }
          input.popLimit(limit);
          break;
        }
        case 88: { // uint64_data = 11, unpacked varint
          if (!((mutable_bitField0_ & 0x00000080) != 0)) {
            uint64Data_ = newLongList();
            mutable_bitField0_ |= 0x00000080;
          }
          uint64Data_.addLong(input.readUInt64());
          break;
        }
        case 90: { // uint64_data = 11, packed
          int length = input.readRawVarint32();
          int limit = input.pushLimit(length);
          if (!((mutable_bitField0_ & 0x00000080) != 0) && input.getBytesUntilLimit() > 0) {
            uint64Data_ = newLongList();
            mutable_bitField0_ |= 0x00000080;
          }
          while (input.getBytesUntilLimit() > 0) {
            uint64Data_.addLong(input.readUInt64());
          }
          input.popLimit(limit);
          break;
        }
        case 98: { // doc_string = 12
          java.lang.String s = input.readStringRequireUtf8();
          docString_ = s;
          break;
        }
        case 106: { // external_data = 13 (repeated message)
          if (!((mutable_bitField0_ & 0x00000020) != 0)) {
            // NOTE: element type restored — it was stripped during HTML extraction.
            externalData_ = new java.util.ArrayList<onnx.OnnxMl.StringStringEntryProto>();
            mutable_bitField0_ |= 0x00000020;
          }
          externalData_.add(
              input.readMessage(onnx.OnnxMl.StringStringEntryProto.parser(), extensionRegistry));
          break;
        }
        case 112: { // data_location = 14 (stored as raw enum number)
          int rawValue = input.readEnum();
          dataLocation_ = rawValue;
          break;
        }
        default: { // unrecognized tag: preserve in unknownFields or finish
          if (!parseUnknownField(
              input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
      }
    }
  } catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (org.nd4j.shade.protobuf.UninitializedMessageException e) {
    throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new org.nd4j.shade.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    // Freeze whichever repeated fields were actually populated.
    if (((mutable_bitField0_ & 0x00000001) != 0)) {
      dims_.makeImmutable();
    }
    if (((mutable_bitField0_ & 0x00000002) != 0)) {
      floatData_.makeImmutable();
    }
    if (((mutable_bitField0_ & 0x00000004) != 0)) {
      int32Data_.makeImmutable();
    }
    if (((mutable_bitField0_ & 0x00000008) != 0)) {
      stringData_ = java.util.Collections.unmodifiableList(stringData_);
    }
    if (((mutable_bitField0_ & 0x00000010) != 0)) {
      int64Data_.makeImmutable();
    }
    if (((mutable_bitField0_ & 0x00000040) != 0)) {
      doubleData_.makeImmutable();
    }
    if (((mutable_bitField0_ & 0x00000080) != 0)) {
      uint64Data_.makeImmutable();
    }
    if (((mutable_bitField0_ & 0x00000020) != 0)) {
      externalData_ = java.util.Collections.unmodifiableList(externalData_);
    }
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
/**
 * Returns the protobuf {@code Descriptor} for the {@code onnx.TensorProto} message type.
 */
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return onnx.OnnxMl.internal_static_onnx_TensorProto_descriptor;
}
/**
 * Returns the reflection accessor table mapping field descriptors to the
 * generated getters/setters of {@code TensorProto} and its {@code Builder}.
 */
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return onnx.OnnxMl.internal_static_onnx_TensorProto_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          onnx.OnnxMl.TensorProto.class, onnx.OnnxMl.TensorProto.Builder.class);
}
/**
 * Element type of a tensor's data.
 *
 * Protobuf enum {@code onnx.TensorProto.DataType}
 */
public enum DataType
    implements org.nd4j.shade.protobuf.ProtocolMessageEnum {
  /**
   * UNDEFINED = 0;
   */
  UNDEFINED(0),
  /**
   * Basic types.
   *
   * FLOAT = 1;
   */
  FLOAT(1),
  /**
   * uint8_t
   *
   * UINT8 = 2;
   */
  UINT8(2),
  /**
   * int8_t
   *
   * INT8 = 3;
   */
  INT8(3),
  /**
   * uint16_t
   *
   * UINT16 = 4;
   */
  UINT16(4),
  /**
   * int16_t
   *
   * INT16 = 5;
   */
  INT16(5),
  /**
   * int32_t
   *
   * INT32 = 6;
   */
  INT32(6),
  /**
   * int64_t
   *
   * INT64 = 7;
   */
  INT64(7),
  /**
   * string
   *
   * STRING = 8;
   */
  STRING(8),
  /**
   * bool
   *
   * BOOL = 9;
   */
  BOOL(9),
  /**
   * IEEE754 half-precision floating-point format (16 bits wide).
   * This format has 1 sign bit, 5 exponent bits, and 10 mantissa bits.
   *
   * FLOAT16 = 10;
   */
  FLOAT16(10),
  /**
   * DOUBLE = 11;
   */
  DOUBLE(11),
  /**
   * UINT32 = 12;
   */
  UINT32(12),
  /**
   * UINT64 = 13;
   */
  UINT64(13),
  /**
   * complex with float32 real and imaginary components
   *
   * COMPLEX64 = 14;
   */
  COMPLEX64(14),
  /**
   * complex with float64 real and imaginary components
   *
   * COMPLEX128 = 15;
   */
  COMPLEX128(15),
  /**
   * Non-IEEE floating-point format based on IEEE754 single-precision
   * floating-point number truncated to 16 bits.
   * This format has 1 sign bit, 8 exponent bits, and 7 mantissa bits.
   *
   * BFLOAT16 = 16;
   */
  BFLOAT16(16),
  UNRECOGNIZED(-1),
  ;
  /**
   * UNDEFINED = 0;
   */
  public static final int UNDEFINED_VALUE = 0;
  /**
   * Basic types.
   *
   * FLOAT = 1;
   */
  public static final int FLOAT_VALUE = 1;
  /**
   * uint8_t
   *
   * UINT8 = 2;
   */
  public static final int UINT8_VALUE = 2;
  /**
   * int8_t
   *
   * INT8 = 3;
   */
  public static final int INT8_VALUE = 3;
  /**
   * uint16_t
   *
   * UINT16 = 4;
   */
  public static final int UINT16_VALUE = 4;
  /**
   * int16_t
   *
   * INT16 = 5;
   */
  public static final int INT16_VALUE = 5;
  /**
   * int32_t
   *
   * INT32 = 6;
   */
  public static final int INT32_VALUE = 6;
  /**
   * int64_t
   *
   * INT64 = 7;
   */
  public static final int INT64_VALUE = 7;
  /**
   * string
   *
   * STRING = 8;
   */
  public static final int STRING_VALUE = 8;
  /**
   * bool
   *
   * BOOL = 9;
   */
  public static final int BOOL_VALUE = 9;
  /**
   * IEEE754 half-precision floating-point format (16 bits wide).
   * This format has 1 sign bit, 5 exponent bits, and 10 mantissa bits.
   *
   * FLOAT16 = 10;
   */
  public static final int FLOAT16_VALUE = 10;
  /**
   * DOUBLE = 11;
   */
  public static final int DOUBLE_VALUE = 11;
  /**
   * UINT32 = 12;
   */
  public static final int UINT32_VALUE = 12;
  /**
   * UINT64 = 13;
   */
  public static final int UINT64_VALUE = 13;
  /**
   * complex with float32 real and imaginary components
   *
   * COMPLEX64 = 14;
   */
  public static final int COMPLEX64_VALUE = 14;
  /**
   * complex with float64 real and imaginary components
   *
   * COMPLEX128 = 15;
   */
  public static final int COMPLEX128_VALUE = 15;
  /**
   * Non-IEEE floating-point format based on IEEE754 single-precision
   * floating-point number truncated to 16 bits.
   * This format has 1 sign bit, 8 exponent bits, and 7 mantissa bits.
   *
   * BFLOAT16 = 16;
   */
  public static final int BFLOAT16_VALUE = 16;

  public final int getNumber() {
    if (this == UNRECOGNIZED) {
      // UNRECOGNIZED is a sentinel for wire values unknown to this runtime;
      // it has no stable number to report.
      throw new java.lang.IllegalArgumentException(
          "Can't get the number of an unknown enum value.");
    }
    return value;
  }
  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static DataType valueOf(int value) {
    return forNumber(value);
  }
  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value, or
   *     {@code null} if the value is not a known constant.
   */
  public static DataType forNumber(int value) {
    switch (value) {
      case 0: return UNDEFINED;
      case 1: return FLOAT;
      case 2: return UINT8;
      case 3: return INT8;
      case 4: return UINT16;
      case 5: return INT16;
      case 6: return INT32;
      case 7: return INT64;
      case 8: return STRING;
      case 9: return BOOL;
      case 10: return FLOAT16;
      case 11: return DOUBLE;
      case 12: return UINT32;
      case 13: return UINT64;
      case 14: return COMPLEX64;
      case 15: return COMPLEX128;
      case 16: return BFLOAT16;
      default: return null;
    }
  }
  // FIX: restored the generic type parameters stripped during extraction; the
  // raw EnumLiteMap types caused unchecked-conversion problems.
  public static org.nd4j.shade.protobuf.Internal.EnumLiteMap<DataType>
      internalGetValueMap() {
    return internalValueMap;
  }
  private static final org.nd4j.shade.protobuf.Internal.EnumLiteMap<
      DataType> internalValueMap =
        new org.nd4j.shade.protobuf.Internal.EnumLiteMap<DataType>() {
          public DataType findValueByNumber(int number) {
            return DataType.forNumber(number);
          }
        };

  public final org.nd4j.shade.protobuf.Descriptors.EnumValueDescriptor
      getValueDescriptor() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalStateException(
          "Can't get the descriptor of an unrecognized enum value.");
    }
    return getDescriptor().getValues().get(ordinal());
  }
  public final org.nd4j.shade.protobuf.Descriptors.EnumDescriptor
      getDescriptorForType() {
    return getDescriptor();
  }
  public static final org.nd4j.shade.protobuf.Descriptors.EnumDescriptor
      getDescriptor() {
    // DataType is the first enum type declared inside TensorProto.
    return onnx.OnnxMl.TensorProto.getDescriptor().getEnumTypes().get(0);
  }
  private static final DataType[] VALUES = values();

  public static DataType valueOf(
      org.nd4j.shade.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
    }
    if (desc.getIndex() == -1) {
      return UNRECOGNIZED;
    }
    return VALUES[desc.getIndex()];
  }
  private final int value;
  private DataType(int value) {
    this.value = value;
  }
  // @@protoc_insertion_point(enum_scope:onnx.TensorProto.DataType)
}
/**
 * Location of the data for this tensor. MUST be one of:
 * - DEFAULT - data stored inside the protobuf message. Data is stored in raw_data (if set) otherwise in type-specified field.
 * - EXTERNAL - data stored in an external location as described by external_data field.
 *
 * Protobuf enum {@code onnx.TensorProto.DataLocation}
 */
public enum DataLocation
    implements org.nd4j.shade.protobuf.ProtocolMessageEnum {
  /**
   * DEFAULT = 0;
   */
  DEFAULT(0),
  /**
   * EXTERNAL = 1;
   */
  EXTERNAL(1),
  UNRECOGNIZED(-1),
  ;
  /**
   * DEFAULT = 0;
   */
  public static final int DEFAULT_VALUE = 0;
  /**
   * EXTERNAL = 1;
   */
  public static final int EXTERNAL_VALUE = 1;

  public final int getNumber() {
    if (this == UNRECOGNIZED) {
      // UNRECOGNIZED has no wire number; it marks values unknown to this runtime.
      throw new java.lang.IllegalArgumentException(
          "Can't get the number of an unknown enum value.");
    }
    return value;
  }
  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static DataLocation valueOf(int value) {
    return forNumber(value);
  }
  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value, or
   *     {@code null} if the value is not a known constant.
   */
  public static DataLocation forNumber(int value) {
    switch (value) {
      case 0: return DEFAULT;
      case 1: return EXTERNAL;
      default: return null;
    }
  }
  // FIX: restored the generic type parameters stripped during extraction; the
  // raw EnumLiteMap types caused unchecked-conversion problems.
  public static org.nd4j.shade.protobuf.Internal.EnumLiteMap<DataLocation>
      internalGetValueMap() {
    return internalValueMap;
  }
  private static final org.nd4j.shade.protobuf.Internal.EnumLiteMap<
      DataLocation> internalValueMap =
        new org.nd4j.shade.protobuf.Internal.EnumLiteMap<DataLocation>() {
          public DataLocation findValueByNumber(int number) {
            return DataLocation.forNumber(number);
          }
        };

  public final org.nd4j.shade.protobuf.Descriptors.EnumValueDescriptor
      getValueDescriptor() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalStateException(
          "Can't get the descriptor of an unrecognized enum value.");
    }
    return getDescriptor().getValues().get(ordinal());
  }
  public final org.nd4j.shade.protobuf.Descriptors.EnumDescriptor
      getDescriptorForType() {
    return getDescriptor();
  }
  public static final org.nd4j.shade.protobuf.Descriptors.EnumDescriptor
      getDescriptor() {
    // DataLocation is the second enum type declared inside TensorProto.
    return onnx.OnnxMl.TensorProto.getDescriptor().getEnumTypes().get(1);
  }
  private static final DataLocation[] VALUES = values();

  public static DataLocation valueOf(
      org.nd4j.shade.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
    }
    if (desc.getIndex() == -1) {
      return UNRECOGNIZED;
    }
    return VALUES[desc.getIndex()];
  }
  private final int value;
  private DataLocation(int value) {
    this.value = value;
  }
  // @@protoc_insertion_point(enum_scope:onnx.TensorProto.DataLocation)
}
/**
 * Read-only accessor interface for {@code onnx.TensorProto.Segment}, implemented
 * by both the immutable message and its builder.
 */
public interface SegmentOrBuilder extends
    // @@protoc_insertion_point(interface_extends:onnx.TensorProto.Segment)
    org.nd4j.shade.protobuf.MessageOrBuilder {
  /**
   * int64 begin = 1;
   * @return The begin.
   */
  long getBegin();
  /**
   * int64 end = 2;
   * @return The end.
   */
  long getEnd();
}
/**
 * For very large tensors, we may want to store them in chunks, in which
 * case the following fields will specify the segment that is stored in
 * the current TensorProto.
 *
 * Protobuf type {@code onnx.TensorProto.Segment}
 */
public static final class Segment extends
    org.nd4j.shade.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:onnx.TensorProto.Segment)
    SegmentOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use Segment.newBuilder() to construct.
  // FIX: restored the wildcard type parameter stripped during extraction.
  private Segment(org.nd4j.shade.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private Segment() {
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new Segment();
  }
  @java.lang.Override
  public final org.nd4j.shade.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor: consumes tags until EOF or an
  // end-group tag, accumulating unknown fields.
  private Segment(
      org.nd4j.shade.protobuf.CodedInputStream input,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    org.nd4j.shade.protobuf.UnknownFieldSet.Builder unknownFields =
        org.nd4j.shade.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 8: {
            begin_ = input.readInt64();
            break;
          }
          case 16: {
            end_ = input.readInt64();
            break;
          }
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (org.nd4j.shade.protobuf.UninitializedMessageException e) {
      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new org.nd4j.shade.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return onnx.OnnxMl.internal_static_onnx_TensorProto_Segment_descriptor;
  }
  @java.lang.Override
  protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return onnx.OnnxMl.internal_static_onnx_TensorProto_Segment_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            onnx.OnnxMl.TensorProto.Segment.class, onnx.OnnxMl.TensorProto.Segment.Builder.class);
  }

  public static final int BEGIN_FIELD_NUMBER = 1;
  private long begin_;
  /**
   * int64 begin = 1;
   * @return The begin.
   */
  @java.lang.Override
  public long getBegin() {
    return begin_;
  }

  public static final int END_FIELD_NUMBER = 2;
  private long end_;
  /**
   * int64 end = 2;
   * @return The end.
   */
  @java.lang.Override
  public long getEnd() {
    return end_;
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(org.nd4j.shade.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    // proto3: default (zero) values are not serialized.
    if (begin_ != 0L) {
      output.writeInt64(1, begin_);
    }
    if (end_ != 0L) {
      output.writeInt64(2, end_);
    }
    unknownFields.writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (begin_ != 0L) {
      size += org.nd4j.shade.protobuf.CodedOutputStream
          .computeInt64Size(1, begin_);
    }
    if (end_ != 0L) {
      size += org.nd4j.shade.protobuf.CodedOutputStream
          .computeInt64Size(2, end_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof onnx.OnnxMl.TensorProto.Segment)) {
      return super.equals(obj);
    }
    onnx.OnnxMl.TensorProto.Segment other = (onnx.OnnxMl.TensorProto.Segment) obj;
    if (getBegin()
        != other.getBegin()) return false;
    if (getEnd()
        != other.getEnd()) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + BEGIN_FIELD_NUMBER;
    hash = (53 * hash) + org.nd4j.shade.protobuf.Internal.hashLong(
        getBegin());
    hash = (37 * hash) + END_FIELD_NUMBER;
    hash = (53 * hash) + org.nd4j.shade.protobuf.Internal.hashLong(
        getEnd());
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static onnx.OnnxMl.TensorProto.Segment parseFrom(
      java.nio.ByteBuffer data)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static onnx.OnnxMl.TensorProto.Segment parseFrom(
      java.nio.ByteBuffer data,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static onnx.OnnxMl.TensorProto.Segment parseFrom(
      org.nd4j.shade.protobuf.ByteString data)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static onnx.OnnxMl.TensorProto.Segment parseFrom(
      org.nd4j.shade.protobuf.ByteString data,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static onnx.OnnxMl.TensorProto.Segment parseFrom(byte[] data)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static onnx.OnnxMl.TensorProto.Segment parseFrom(
      byte[] data,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static onnx.OnnxMl.TensorProto.Segment parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.nd4j.shade.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static onnx.OnnxMl.TensorProto.Segment parseFrom(
      java.io.InputStream input,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.nd4j.shade.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static onnx.OnnxMl.TensorProto.Segment parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.nd4j.shade.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static onnx.OnnxMl.TensorProto.Segment parseDelimitedFrom(
      java.io.InputStream input,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.nd4j.shade.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static onnx.OnnxMl.TensorProto.Segment parseFrom(
      org.nd4j.shade.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return org.nd4j.shade.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static onnx.OnnxMl.TensorProto.Segment parseFrom(
      org.nd4j.shade.protobuf.CodedInputStream input,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.nd4j.shade.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(onnx.OnnxMl.TensorProto.Segment prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * For very large tensors, we may want to store them in chunks, in which
   * case the following fields will specify the segment that is stored in
   * the current TensorProto.
   *
   * Protobuf type {@code onnx.TensorProto.Segment}
   */
  // FIX: restored the self-referential type parameter stripped during
  // extraction; the raw Builder supertype broke the fluent-return contract.
  public static final class Builder extends
      org.nd4j.shade.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:onnx.TensorProto.Segment)
      onnx.OnnxMl.TensorProto.SegmentOrBuilder {
    public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return onnx.OnnxMl.internal_static_onnx_TensorProto_Segment_descriptor;
    }
    @java.lang.Override
    protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return onnx.OnnxMl.internal_static_onnx_TensorProto_Segment_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              onnx.OnnxMl.TensorProto.Segment.class, onnx.OnnxMl.TensorProto.Segment.Builder.class);
    }
    // Construct using onnx.OnnxMl.TensorProto.Segment.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (org.nd4j.shade.protobuf.GeneratedMessageV3
          .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      begin_ = 0L;
      end_ = 0L;
      return this;
    }
    @java.lang.Override
    public org.nd4j.shade.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return onnx.OnnxMl.internal_static_onnx_TensorProto_Segment_descriptor;
    }
    @java.lang.Override
    public onnx.OnnxMl.TensorProto.Segment getDefaultInstanceForType() {
      return onnx.OnnxMl.TensorProto.Segment.getDefaultInstance();
    }
    @java.lang.Override
    public onnx.OnnxMl.TensorProto.Segment build() {
      onnx.OnnxMl.TensorProto.Segment result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public onnx.OnnxMl.TensorProto.Segment buildPartial() {
      onnx.OnnxMl.TensorProto.Segment result = new onnx.OnnxMl.TensorProto.Segment(this);
      result.begin_ = begin_;
      result.end_ = end_;
      onBuilt();
      return result;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        org.nd4j.shade.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(org.nd4j.shade.protobuf.Message other) {
      if (other instanceof onnx.OnnxMl.TensorProto.Segment) {
        return mergeFrom((onnx.OnnxMl.TensorProto.Segment)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(onnx.OnnxMl.TensorProto.Segment other) {
      if (other == onnx.OnnxMl.TensorProto.Segment.getDefaultInstance()) return this;
      // proto3 merge semantics: only non-default values overwrite.
      if (other.getBegin() != 0L) {
        setBegin(other.getBegin());
      }
      if (other.getEnd() != 0L) {
        setEnd(other.getEnd());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        org.nd4j.shade.protobuf.CodedInputStream input,
        org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      onnx.OnnxMl.TensorProto.Segment parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (onnx.OnnxMl.TensorProto.Segment) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private long begin_ ;
    /**
     * int64 begin = 1;
     * @return The begin.
     */
    @java.lang.Override
    public long getBegin() {
      return begin_;
    }
    /**
     * int64 begin = 1;
     * @param value The begin to set.
     * @return This builder for chaining.
     */
    public Builder setBegin(long value) {
      begin_ = value;
      onChanged();
      return this;
    }
    /**
     * int64 begin = 1;
     * @return This builder for chaining.
     */
    public Builder clearBegin() {
      begin_ = 0L;
      onChanged();
      return this;
    }

    private long end_ ;
    /**
     * int64 end = 2;
     * @return The end.
     */
    @java.lang.Override
    public long getEnd() {
      return end_;
    }
    /**
     * int64 end = 2;
     * @param value The end to set.
     * @return This builder for chaining.
     */
    public Builder setEnd(long value) {
      end_ = value;
      onChanged();
      return this;
    }
    /**
     * int64 end = 2;
     * @return This builder for chaining.
     */
    public Builder clearEnd() {
      end_ = 0L;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:onnx.TensorProto.Segment)
  }

  // @@protoc_insertion_point(class_scope:onnx.TensorProto.Segment)
  private static final onnx.OnnxMl.TensorProto.Segment DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new onnx.OnnxMl.TensorProto.Segment();
  }
  public static onnx.OnnxMl.TensorProto.Segment getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // FIX: restored the <Segment> type parameters stripped during extraction;
  // raw Parser/AbstractParser lost the typed parse results.
  private static final org.nd4j.shade.protobuf.Parser<Segment>
      PARSER = new org.nd4j.shade.protobuf.AbstractParser<Segment>() {
    @java.lang.Override
    public Segment parsePartialFrom(
        org.nd4j.shade.protobuf.CodedInputStream input,
        org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
      return new Segment(input, extensionRegistry);
    }
  };

  public static org.nd4j.shade.protobuf.Parser<Segment> parser() {
    return PARSER;
  }
  @java.lang.Override
  public org.nd4j.shade.protobuf.Parser<Segment> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public onnx.OnnxMl.TensorProto.Segment getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
public static final int DIMS_FIELD_NUMBER = 1;
private org.nd4j.shade.protobuf.Internal.LongList dims_;
/**
 * The shape of the tensor.
 *
 * repeated int64 dims = 1;
 * @return A list containing the dims.
 */
// FIX: restored the element type stripped during extraction; the raw List
// return broke the typed repeated-field accessor contract.
@java.lang.Override
public java.util.List<java.lang.Long>
    getDimsList() {
  return dims_;
}
/**
 * The shape of the tensor.
 *
 * repeated int64 dims = 1;
 * @return The count of dims.
 */
public int getDimsCount() {
  return dims_.size();
}
/**
 * The shape of the tensor.
 *
 * repeated int64 dims = 1;
 * @param index The index of the element to return.
 * @return The dims at the given index.
 */
public long getDims(int index) {
  return dims_.getLong(index);
}
// Cached byte size of the packed dims field, computed by getSerializedSize().
private int dimsMemoizedSerializedSize = -1;
public static final int DATA_TYPE_FIELD_NUMBER = 2;
// Stored as a raw int rather than the DataType enum so that values unknown
// to this runtime survive a parse/serialize round trip.
private int dataType_;
/**
 * The data type of the tensor.
 * This field MUST have a valid TensorProto.DataType value
 *
 * int32 data_type = 2;
 * @return The dataType.
 */
@java.lang.Override
public int getDataType() {
  return dataType_;
}
public static final int SEGMENT_FIELD_NUMBER = 3;
// null means "not set"; getSegment() substitutes the default instance.
private onnx.OnnxMl.TensorProto.Segment segment_;
/**
 * .onnx.TensorProto.Segment segment = 3;
 * @return Whether the segment field is set.
 */
@java.lang.Override
public boolean hasSegment() {
  return segment_ != null;
}
/**
 * .onnx.TensorProto.Segment segment = 3;
 * @return The segment, or the default instance if unset.
 */
@java.lang.Override
public onnx.OnnxMl.TensorProto.Segment getSegment() {
  return segment_ == null ? onnx.OnnxMl.TensorProto.Segment.getDefaultInstance() : segment_;
}
/**
 * .onnx.TensorProto.Segment segment = 3;
 */
@java.lang.Override
public onnx.OnnxMl.TensorProto.SegmentOrBuilder getSegmentOrBuilder() {
  return getSegment();
}
public static final int FLOAT_DATA_FIELD_NUMBER = 4;
private org.nd4j.shade.protobuf.Internal.FloatList floatData_;
/**
 * For float and complex64 values
 * Complex64 tensors are encoded as a single array of floats,
 * with the real components appearing in odd numbered positions,
 * and the corresponding imaginary component appearing in the
 * subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
 * is encoded as [1.0, 2.0 ,3.0 ,4.0]
 * When this field is present, the data_type field MUST be FLOAT or COMPLEX64.
 *
 * repeated float float_data = 4 [packed = true];
 * @return A list containing the floatData.
 */
// FIX: restored the element type stripped during extraction; the raw List
// return broke the typed repeated-field accessor contract.
@java.lang.Override
public java.util.List<java.lang.Float>
    getFloatDataList() {
  return floatData_;
}
/**
 * For float and complex64 values
 * Complex64 tensors are encoded as a single array of floats,
 * with the real components appearing in odd numbered positions,
 * and the corresponding imaginary component appearing in the
 * subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
 * is encoded as [1.0, 2.0 ,3.0 ,4.0]
 * When this field is present, the data_type field MUST be FLOAT or COMPLEX64.
 *
 * repeated float float_data = 4 [packed = true];
 * @return The count of floatData.
 */
public int getFloatDataCount() {
  return floatData_.size();
}
/**
 * For float and complex64 values
 * Complex64 tensors are encoded as a single array of floats,
 * with the real components appearing in odd numbered positions,
 * and the corresponding imaginary component appearing in the
 * subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
 * is encoded as [1.0, 2.0 ,3.0 ,4.0]
 * When this field is present, the data_type field MUST be FLOAT or COMPLEX64.
 *
 * repeated float float_data = 4 [packed = true];
 * @param index The index of the element to return.
 * @return The floatData at the given index.
 */
public float getFloatData(int index) {
  return floatData_.getFloat(index);
}
// Cached byte size of the packed float_data field.
private int floatDataMemoizedSerializedSize = -1;
public static final int INT32_DATA_FIELD_NUMBER = 5;
private org.nd4j.shade.protobuf.Internal.IntList int32Data_;
/**
 * For int32, uint8, int8, uint16, int16, bool, and float16 values
 * float16 values must be bit-wise converted to an uint16_t prior
 * to writing to the buffer.
 * When this field is present, the data_type field MUST be
 * INT32, INT16, INT8, UINT16, UINT8, BOOL, or FLOAT16
 *
 * repeated int32 int32_data = 5 [packed = true];
 * @return A list containing the int32Data.
 */
// FIX: restored the element type stripped during extraction; the raw List
// return broke the typed repeated-field accessor contract.
@java.lang.Override
public java.util.List<java.lang.Integer>
    getInt32DataList() {
  return int32Data_;
}
/**
 * For int32, uint8, int8, uint16, int16, bool, and float16 values
 * float16 values must be bit-wise converted to an uint16_t prior
 * to writing to the buffer.
 * When this field is present, the data_type field MUST be
 * INT32, INT16, INT8, UINT16, UINT8, BOOL, or FLOAT16
 *
 * repeated int32 int32_data = 5 [packed = true];
 * @return The count of int32Data.
 */
public int getInt32DataCount() {
  return int32Data_.size();
}
/**
 * For int32, uint8, int8, uint16, int16, bool, and float16 values
 * float16 values must be bit-wise converted to an uint16_t prior
 * to writing to the buffer.
 * When this field is present, the data_type field MUST be
 * INT32, INT16, INT8, UINT16, UINT8, BOOL, or FLOAT16
 *
 * repeated int32 int32_data = 5 [packed = true];
 * @param index The index of the element to return.
 * @return The int32Data at the given index.
 */
public int getInt32Data(int index) {
  return int32Data_.getInt(index);
}
// Cached byte size of the packed int32_data field.
private int int32DataMemoizedSerializedSize = -1;
public static final int STRING_DATA_FIELD_NUMBER = 6;
// FIX: restored the element type stripped during extraction; with a raw List,
// getStringData(int) would not even compile (List.get returns Object, not
// ByteString).
private java.util.List<org.nd4j.shade.protobuf.ByteString> stringData_;
/**
 * For strings.
 * Each element of string_data is a UTF-8 encoded Unicode
 * string. No trailing null, no leading BOM. The protobuf "string"
 * scalar type is not used to match ML community conventions.
 * When this field is present, the data_type field MUST be STRING
 *
 * repeated bytes string_data = 6;
 * @return A list containing the stringData.
 */
@java.lang.Override
public java.util.List<org.nd4j.shade.protobuf.ByteString>
    getStringDataList() {
  return stringData_;
}
/**
 * For strings.
 * Each element of string_data is a UTF-8 encoded Unicode
 * string. No trailing null, no leading BOM. The protobuf "string"
 * scalar type is not used to match ML community conventions.
 * When this field is present, the data_type field MUST be STRING
 *
 * repeated bytes string_data = 6;
 * @return The count of stringData.
 */
public int getStringDataCount() {
  return stringData_.size();
}
/**
 * For strings.
 * Each element of string_data is a UTF-8 encoded Unicode
 * string. No trailing null, no leading BOM. The protobuf "string"
 * scalar type is not used to match ML community conventions.
 * When this field is present, the data_type field MUST be STRING
 *
 * repeated bytes string_data = 6;
 * @param index The index of the element to return.
 * @return The stringData at the given index.
 */
public org.nd4j.shade.protobuf.ByteString getStringData(int index) {
  return stringData_.get(index);
}
public static final int INT64_DATA_FIELD_NUMBER = 7;
private org.nd4j.shade.protobuf.Internal.LongList int64Data_;
/**
 * For int64.
 * When this field is present, the data_type field MUST be INT64
 *
 * repeated int64 int64_data = 7 [packed = true];
 * @return A list containing the int64Data.
 */
// FIX: restored the element type stripped during extraction; the raw List
// return broke the typed repeated-field accessor contract.
@java.lang.Override
public java.util.List<java.lang.Long>
    getInt64DataList() {
  return int64Data_;
}
/**
 * For int64.
 * When this field is present, the data_type field MUST be INT64
 *
 * repeated int64 int64_data = 7 [packed = true];
 * @return The count of int64Data.
 */
public int getInt64DataCount() {
  return int64Data_.size();
}
/**
 * For int64.
 * When this field is present, the data_type field MUST be INT64
 *
 * repeated int64 int64_data = 7 [packed = true];
 * @param index The index of the element to return.
 * @return The int64Data at the given index.
 */
public long getInt64Data(int index) {
  return int64Data_.getLong(index);
}
// Cached byte size of the packed int64_data field.
private int int64DataMemoizedSerializedSize = -1;
public static final int NAME_FIELD_NUMBER = 8;
// Holds either a java.lang.String or a ByteString. The first accessor to run
// converts and caches its preferred representation (benign data race: both
// representations are immutable and equivalent, so volatile is sufficient).
private volatile java.lang.Object name_;
/**
 *
 * Optionally, a name for the tensor.
 *
 *
 * string name = 8;
 * @return The name.
 */
@java.lang.Override
public java.lang.String getName() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    org.nd4j.shade.protobuf.ByteString bs =
        (org.nd4j.shade.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so later calls skip the UTF-8 decode.
    name_ = s;
    return s;
  }
}
/**
 *
 * Optionally, a name for the tensor.
 *
 *
 * string name = 8;
 * @return The bytes for name.
 */
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString
    getNameBytes() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    org.nd4j.shade.protobuf.ByteString b =
        org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    // Cache the encoded ByteString for subsequent byte-level access.
    name_ = b;
    return b;
  } else {
    return (org.nd4j.shade.protobuf.ByteString) ref;
  }
}
public static final int DOC_STRING_FIELD_NUMBER = 12;
// Same lazy String/ByteString caching scheme as name_ above.
private volatile java.lang.Object docString_;
/**
 *
 * A human-readable documentation for this tensor. Markdown is allowed.
 *
 *
 * string doc_string = 12;
 * @return The docString.
 */
@java.lang.Override
public java.lang.String getDocString() {
  java.lang.Object ref = docString_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    org.nd4j.shade.protobuf.ByteString bs =
        (org.nd4j.shade.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String form.
    docString_ = s;
    return s;
  }
}
/**
 *
 * A human-readable documentation for this tensor. Markdown is allowed.
 *
 *
 * string doc_string = 12;
 * @return The bytes for docString.
 */
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString
    getDocStringBytes() {
  java.lang.Object ref = docString_;
  if (ref instanceof java.lang.String) {
    org.nd4j.shade.protobuf.ByteString b =
        org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    // Cache the encoded ByteString form.
    docString_ = b;
    return b;
  } else {
    return (org.nd4j.shade.protobuf.ByteString) ref;
  }
}
public static final int RAW_DATA_FIELD_NUMBER = 9;
private org.nd4j.shade.protobuf.ByteString rawData_;
/**
 *
 * Serializations can either use one of the fields above, or use this
 * raw bytes field. The only exception is the string case, where one is
 * required to store the content in the repeated bytes string_data field.
 * When this raw_data field is used to store tensor value, elements MUST
 * be stored in as fixed-width, little-endian order.
 * Floating-point data types MUST be stored in IEEE 754 format.
 * Complex64 elements must be written as two consecutive FLOAT values, real component first.
 * Complex128 elements must be written as two consecutive DOUBLE values, real component first.
 * Boolean type MUST be written one byte per tensor element (00000001 for true, 00000000 for false).
 * Note: the advantage of specific field rather than the raw_data field is
 * that in some cases (e.g. int data), protobuf does a better packing via
 * variable length storage, and may lead to smaller binary footprint.
 * When this field is present, the data_type field MUST NOT be STRING or UNDEFINED
 *
 *
 * bytes raw_data = 9;
 * @return The rawData.
 */
@java.lang.Override
public org.nd4j.shade.protobuf.ByteString getRawData() {
  return rawData_;
}
public static final int EXTERNAL_DATA_FIELD_NUMBER = 13;
// Restored the element type argument that was stripped from this listing;
// protoc declares repeated message fields as List<MessageType>.
private java.util.List<onnx.OnnxMl.StringStringEntryProto> externalData_;
/**
 * <pre>
 * Data can be stored inside the protobuf file using type-specific fields or raw_data.
 * Alternatively, raw bytes data can be stored in an external file, using the external_data field.
 * external_data stores key-value pairs describing data location. Recognized keys are:
 * - "location" (required) - POSIX filesystem path relative to the directory where the ONNX
 * protobuf model was stored
 * - "offset" (optional) - position of byte at which stored data begins. Integer stored as string.
 * Offset values SHOULD be multiples 4096 (page size) to enable mmap support.
 * - "length" (optional) - number of bytes containing data. Integer stored as string.
 * - "checksum" (optional) - SHA1 digest of file specified in under 'location' key.
 * </pre>
 *
 * <code>repeated .onnx.StringStringEntryProto external_data = 13;</code>
 */
@java.lang.Override
public java.util.List<onnx.OnnxMl.StringStringEntryProto> getExternalDataList() {
  return externalData_;
}
/**
 * <code>repeated .onnx.StringStringEntryProto external_data = 13;</code>
 * @return A view of the entries as their OrBuilder interface.
 */
@java.lang.Override
public java.util.List<? extends onnx.OnnxMl.StringStringEntryProtoOrBuilder>
    getExternalDataOrBuilderList() {
  return externalData_;
}
/**
 * <code>repeated .onnx.StringStringEntryProto external_data = 13;</code>
 * @return The count of externalData.
 */
@java.lang.Override
public int getExternalDataCount() {
  return externalData_.size();
}
/**
 * <code>repeated .onnx.StringStringEntryProto external_data = 13;</code>
 * @param index The index of the element to return.
 * @return The externalData at the given index.
 */
@java.lang.Override
public onnx.OnnxMl.StringStringEntryProto getExternalData(int index) {
  return externalData_.get(index);
}
/**
 * <code>repeated .onnx.StringStringEntryProto external_data = 13;</code>
 * @param index The index of the element to return.
 * @return The externalData at the given index, as its OrBuilder interface.
 */
@java.lang.Override
public onnx.OnnxMl.StringStringEntryProtoOrBuilder getExternalDataOrBuilder(
    int index) {
  return externalData_.get(index);
}
public static final int DATA_LOCATION_FIELD_NUMBER = 14;
// Stored as the raw wire value so unrecognized enum numbers survive round-trips.
private int dataLocation_;
/**
 *
 * If value not set, data is stored in raw_data (if set) otherwise in type-specified field.
 *
 *
 * .onnx.TensorProto.DataLocation data_location = 14;
 * @return The enum numeric value on the wire for dataLocation.
 */
@java.lang.Override public int getDataLocationValue() {
  return dataLocation_;
}
/**
 *
 * If value not set, data is stored in raw_data (if set) otherwise in type-specified field.
 *
 *
 * .onnx.TensorProto.DataLocation data_location = 14;
 * @return The dataLocation, or UNRECOGNIZED if the wire value maps to no known constant.
 */
@java.lang.Override public onnx.OnnxMl.TensorProto.DataLocation getDataLocation() {
  @SuppressWarnings("deprecation")
  onnx.OnnxMl.TensorProto.DataLocation result = onnx.OnnxMl.TensorProto.DataLocation.valueOf(dataLocation_);
  return result == null ? onnx.OnnxMl.TensorProto.DataLocation.UNRECOGNIZED : result;
}
public static final int DOUBLE_DATA_FIELD_NUMBER = 10;
private org.nd4j.shade.protobuf.Internal.DoubleList doubleData_;
/**
 * <pre>
 * For double
 * Complex128 tensors are encoded as a single array of doubles,
 * with the real components appearing in odd numbered positions,
 * and the corresponding imaginary component appearing in the
 * subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
 * is encoded as [1.0, 2.0 ,3.0 ,4.0]
 * When this field is present, the data_type field MUST be DOUBLE or COMPLEX128
 * </pre>
 *
 * <code>repeated double double_data = 10 [packed = true];</code>
 * @return A list containing the doubleData.
 */
@java.lang.Override
public java.util.List<java.lang.Double>
    getDoubleDataList() {
  // Restored List<Double>: the boxed element type was stripped from this listing.
  return doubleData_;
}
/**
 * <code>repeated double double_data = 10 [packed = true];</code>
 * @return The count of doubleData.
 */
public int getDoubleDataCount() {
  return doubleData_.size();
}
/**
 * <code>repeated double double_data = 10 [packed = true];</code>
 * @param index The index of the element to return.
 * @return The doubleData at the given index.
 */
public double getDoubleData(int index) {
  return doubleData_.getDouble(index);
}
// Cached byte size of the packed double_data payload; written by getSerializedSize(),
// read by writeTo().
private int doubleDataMemoizedSerializedSize = -1;
public static final int UINT64_DATA_FIELD_NUMBER = 11;
private org.nd4j.shade.protobuf.Internal.LongList uint64Data_;
/**
 * <pre>
 * For uint64 and uint32 values
 * When this field is present, the data_type field MUST be
 * UINT32 or UINT64
 * </pre>
 *
 * <code>repeated uint64 uint64_data = 11 [packed = true];</code>
 * @return A list containing the uint64Data.
 */
@java.lang.Override
public java.util.List<java.lang.Long>
    getUint64DataList() {
  // Restored List<Long>: the boxed element type was stripped from this listing.
  return uint64Data_;
}
/**
 * <code>repeated uint64 uint64_data = 11 [packed = true];</code>
 * @return The count of uint64Data.
 */
public int getUint64DataCount() {
  return uint64Data_.size();
}
/**
 * <code>repeated uint64 uint64_data = 11 [packed = true];</code>
 * @param index The index of the element to return.
 * @return The uint64Data at the given index (unsigned, carried in a signed long).
 */
public long getUint64Data(int index) {
  return uint64Data_.getLong(index);
}
// Cached byte size of the packed uint64_data payload; written by getSerializedSize(),
// read by writeTo().
private int uint64DataMemoizedSerializedSize = -1;
// Tri-state cache: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// proto3 messages have no required fields, so this always resolves to true;
// the cache is kept for API uniformity with proto2-generated code.
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(org.nd4j.shade.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  // Must run first: populates the *MemoizedSerializedSize fields that the
  // packed-field headers below depend on.
  getSerializedSize();
  // Packed repeated fields are written as: raw tag byte (fieldNo<<3 | wiretype 2),
  // varint byte-length, then the untagged elements.
  if (getDimsList().size() > 0) {
    output.writeUInt32NoTag(10);  // field 1, length-delimited
    output.writeUInt32NoTag(dimsMemoizedSerializedSize);
  }
  for (int i = 0; i < dims_.size(); i++) {
    output.writeInt64NoTag(dims_.getLong(i));
  }
  if (dataType_ != 0) {
    output.writeInt32(2, dataType_);
  }
  if (segment_ != null) {
    output.writeMessage(3, getSegment());
  }
  if (getFloatDataList().size() > 0) {
    output.writeUInt32NoTag(34);  // field 4, length-delimited
    output.writeUInt32NoTag(floatDataMemoizedSerializedSize);
  }
  for (int i = 0; i < floatData_.size(); i++) {
    output.writeFloatNoTag(floatData_.getFloat(i));
  }
  if (getInt32DataList().size() > 0) {
    output.writeUInt32NoTag(42);  // field 5, length-delimited
    output.writeUInt32NoTag(int32DataMemoizedSerializedSize);
  }
  for (int i = 0; i < int32Data_.size(); i++) {
    output.writeInt32NoTag(int32Data_.getInt(i));
  }
  // bytes fields are never packed; each element carries its own tag.
  for (int i = 0; i < stringData_.size(); i++) {
    output.writeBytes(6, stringData_.get(i));
  }
  if (getInt64DataList().size() > 0) {
    output.writeUInt32NoTag(58);  // field 7, length-delimited
    output.writeUInt32NoTag(int64DataMemoizedSerializedSize);
  }
  for (int i = 0; i < int64Data_.size(); i++) {
    output.writeInt64NoTag(int64Data_.getLong(i));
  }
  // proto3 scalar fields are skipped when set to their default value.
  if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 8, name_);
  }
  if (!rawData_.isEmpty()) {
    output.writeBytes(9, rawData_);
  }
  if (getDoubleDataList().size() > 0) {
    output.writeUInt32NoTag(82);  // field 10, length-delimited
    output.writeUInt32NoTag(doubleDataMemoizedSerializedSize);
  }
  for (int i = 0; i < doubleData_.size(); i++) {
    output.writeDoubleNoTag(doubleData_.getDouble(i));
  }
  if (getUint64DataList().size() > 0) {
    output.writeUInt32NoTag(90);  // field 11, length-delimited
    output.writeUInt32NoTag(uint64DataMemoizedSerializedSize);
  }
  for (int i = 0; i < uint64Data_.size(); i++) {
    output.writeUInt64NoTag(uint64Data_.getLong(i));
  }
  if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(docString_)) {
    org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 12, docString_);
  }
  for (int i = 0; i < externalData_.size(); i++) {
    output.writeMessage(13, externalData_.get(i));
  }
  if (dataLocation_ != onnx.OnnxMl.TensorProto.DataLocation.DEFAULT.getNumber()) {
    output.writeEnum(14, dataLocation_);
  }
  unknownFields.writeTo(output);
}
// Computes (and memoizes) the serialized byte size, also filling in the
// per-field packed-payload sizes that writeTo() reuses. Must stay in sync
// with writeTo(): every field written there is sized here.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  {
    // dims (field 1, packed int64): payload + 1 tag byte + length varint.
    int dataSize = 0;
    for (int i = 0; i < dims_.size(); i++) {
      dataSize += org.nd4j.shade.protobuf.CodedOutputStream
        .computeInt64SizeNoTag(dims_.getLong(i));
    }
    size += dataSize;
    if (!getDimsList().isEmpty()) {
      size += 1;
      size += org.nd4j.shade.protobuf.CodedOutputStream
          .computeInt32SizeNoTag(dataSize);
    }
    dimsMemoizedSerializedSize = dataSize;
  }
  if (dataType_ != 0) {
    size += org.nd4j.shade.protobuf.CodedOutputStream
      .computeInt32Size(2, dataType_);
  }
  if (segment_ != null) {
    size += org.nd4j.shade.protobuf.CodedOutputStream
      .computeMessageSize(3, getSegment());
  }
  {
    // float_data (field 4, packed): fixed 4 bytes per element.
    int dataSize = 0;
    dataSize = 4 * getFloatDataList().size();
    size += dataSize;
    if (!getFloatDataList().isEmpty()) {
      size += 1;
      size += org.nd4j.shade.protobuf.CodedOutputStream
          .computeInt32SizeNoTag(dataSize);
    }
    floatDataMemoizedSerializedSize = dataSize;
  }
  {
    // int32_data (field 5, packed varint).
    int dataSize = 0;
    for (int i = 0; i < int32Data_.size(); i++) {
      dataSize += org.nd4j.shade.protobuf.CodedOutputStream
        .computeInt32SizeNoTag(int32Data_.getInt(i));
    }
    size += dataSize;
    if (!getInt32DataList().isEmpty()) {
      size += 1;
      size += org.nd4j.shade.protobuf.CodedOutputStream
          .computeInt32SizeNoTag(dataSize);
    }
    int32DataMemoizedSerializedSize = dataSize;
  }
  {
    // string_data (field 6, unpacked bytes): one tag byte per element.
    int dataSize = 0;
    for (int i = 0; i < stringData_.size(); i++) {
      dataSize += org.nd4j.shade.protobuf.CodedOutputStream
        .computeBytesSizeNoTag(stringData_.get(i));
    }
    size += dataSize;
    size += 1 * getStringDataList().size();
  }
  {
    // int64_data (field 7, packed varint).
    int dataSize = 0;
    for (int i = 0; i < int64Data_.size(); i++) {
      dataSize += org.nd4j.shade.protobuf.CodedOutputStream
        .computeInt64SizeNoTag(int64Data_.getLong(i));
    }
    size += dataSize;
    if (!getInt64DataList().isEmpty()) {
      size += 1;
      size += org.nd4j.shade.protobuf.CodedOutputStream
          .computeInt32SizeNoTag(dataSize);
    }
    int64DataMemoizedSerializedSize = dataSize;
  }
  if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(8, name_);
  }
  if (!rawData_.isEmpty()) {
    size += org.nd4j.shade.protobuf.CodedOutputStream
      .computeBytesSize(9, rawData_);
  }
  {
    // double_data (field 10, packed): fixed 8 bytes per element.
    int dataSize = 0;
    dataSize = 8 * getDoubleDataList().size();
    size += dataSize;
    if (!getDoubleDataList().isEmpty()) {
      size += 1;
      size += org.nd4j.shade.protobuf.CodedOutputStream
          .computeInt32SizeNoTag(dataSize);
    }
    doubleDataMemoizedSerializedSize = dataSize;
  }
  {
    // uint64_data (field 11, packed varint).
    int dataSize = 0;
    for (int i = 0; i < uint64Data_.size(); i++) {
      dataSize += org.nd4j.shade.protobuf.CodedOutputStream
        .computeUInt64SizeNoTag(uint64Data_.getLong(i));
    }
    size += dataSize;
    if (!getUint64DataList().isEmpty()) {
      size += 1;
      size += org.nd4j.shade.protobuf.CodedOutputStream
          .computeInt32SizeNoTag(dataSize);
    }
    uint64DataMemoizedSerializedSize = dataSize;
  }
  if (!org.nd4j.shade.protobuf.GeneratedMessageV3.isStringEmpty(docString_)) {
    size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(12, docString_);
  }
  for (int i = 0; i < externalData_.size(); i++) {
    size += org.nd4j.shade.protobuf.CodedOutputStream
      .computeMessageSize(13, externalData_.get(i));
  }
  if (dataLocation_ != onnx.OnnxMl.TensorProto.DataLocation.DEFAULT.getNumber()) {
    size += org.nd4j.shade.protobuf.CodedOutputStream
      .computeEnumSize(14, dataLocation_);
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality over every declared field plus unknown fields; identical
// contract to the generated implementation, restructured as a single
// short-circuiting conjunction.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof onnx.OnnxMl.TensorProto)) {
    return super.equals(obj);
  }
  onnx.OnnxMl.TensorProto that = (onnx.OnnxMl.TensorProto) obj;
  // segment is a singular message: presence must match, then contents.
  if (hasSegment() != that.hasSegment()) {
    return false;
  }
  if (hasSegment() && !getSegment().equals(that.getSegment())) {
    return false;
  }
  return getDimsList().equals(that.getDimsList())
      && getDataType() == that.getDataType()
      && getFloatDataList().equals(that.getFloatDataList())
      && getInt32DataList().equals(that.getInt32DataList())
      && getStringDataList().equals(that.getStringDataList())
      && getInt64DataList().equals(that.getInt64DataList())
      && getName().equals(that.getName())
      && getDocString().equals(that.getDocString())
      && getRawData().equals(that.getRawData())
      && getExternalDataList().equals(that.getExternalDataList())
      && dataLocation_ == that.dataLocation_
      && getDoubleDataList().equals(that.getDoubleDataList())
      && getUint64DataList().equals(that.getUint64DataList())
      && unknownFields.equals(that.unknownFields);
}
// Memoized hash consistent with equals(): folds each set field's number and
// value hash in with the generated 37/53 multiplier scheme. The exact order
// and arithmetic must not change or persisted hashes would diverge.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  // Repeated fields only contribute when non-empty.
  if (getDimsCount() > 0) {
    hash = (37 * hash) + DIMS_FIELD_NUMBER;
    hash = (53 * hash) + getDimsList().hashCode();
  }
  hash = (37 * hash) + DATA_TYPE_FIELD_NUMBER;
  hash = (53 * hash) + getDataType();
  if (hasSegment()) {
    hash = (37 * hash) + SEGMENT_FIELD_NUMBER;
    hash = (53 * hash) + getSegment().hashCode();
  }
  if (getFloatDataCount() > 0) {
    hash = (37 * hash) + FLOAT_DATA_FIELD_NUMBER;
    hash = (53 * hash) + getFloatDataList().hashCode();
  }
  if (getInt32DataCount() > 0) {
    hash = (37 * hash) + INT32_DATA_FIELD_NUMBER;
    hash = (53 * hash) + getInt32DataList().hashCode();
  }
  if (getStringDataCount() > 0) {
    hash = (37 * hash) + STRING_DATA_FIELD_NUMBER;
    hash = (53 * hash) + getStringDataList().hashCode();
  }
  if (getInt64DataCount() > 0) {
    hash = (37 * hash) + INT64_DATA_FIELD_NUMBER;
    hash = (53 * hash) + getInt64DataList().hashCode();
  }
  hash = (37 * hash) + NAME_FIELD_NUMBER;
  hash = (53 * hash) + getName().hashCode();
  hash = (37 * hash) + DOC_STRING_FIELD_NUMBER;
  hash = (53 * hash) + getDocString().hashCode();
  hash = (37 * hash) + RAW_DATA_FIELD_NUMBER;
  hash = (53 * hash) + getRawData().hashCode();
  if (getExternalDataCount() > 0) {
    hash = (37 * hash) + EXTERNAL_DATA_FIELD_NUMBER;
    hash = (53 * hash) + getExternalDataList().hashCode();
  }
  hash = (37 * hash) + DATA_LOCATION_FIELD_NUMBER;
  hash = (53 * hash) + dataLocation_;
  if (getDoubleDataCount() > 0) {
    hash = (37 * hash) + DOUBLE_DATA_FIELD_NUMBER;
    hash = (53 * hash) + getDoubleDataList().hashCode();
  }
  if (getUint64DataCount() > 0) {
    hash = (37 * hash) + UINT64_DATA_FIELD_NUMBER;
    hash = (53 * hash) + getUint64DataList().hashCode();
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points. All overloads delegate to the static
// PARSER (byte sources) or to GeneratedMessageV3's IO helpers (stream sources);
// the *Delimited variants read a leading varint length prefix.
public static onnx.OnnxMl.TensorProto parseFrom(
    java.nio.ByteBuffer data)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.TensorProto parseFrom(
    java.nio.ByteBuffer data,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.TensorProto parseFrom(
    org.nd4j.shade.protobuf.ByteString data)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.TensorProto parseFrom(
    org.nd4j.shade.protobuf.ByteString data,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.TensorProto parseFrom(byte[] data)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static onnx.OnnxMl.TensorProto parseFrom(
    byte[] data,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static onnx.OnnxMl.TensorProto parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static onnx.OnnxMl.TensorProto parseFrom(
    java.io.InputStream input,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
public static onnx.OnnxMl.TensorProto parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static onnx.OnnxMl.TensorProto parseDelimitedFrom(
    java.io.InputStream input,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static onnx.OnnxMl.TensorProto parseFrom(
    org.nd4j.shade.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static onnx.OnnxMl.TensorProto parseFrom(
    org.nd4j.shade.protobuf.CodedInputStream input,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
// Fresh builder seeded from the (empty) default instance.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
// Builder pre-populated with all fields of {@code prototype}.
public static Builder newBuilder(onnx.OnnxMl.TensorProto prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // Skip the merge for the default instance: nothing to copy.
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}
// Used internally when this message is built as a nested field of a parent builder.
@java.lang.Override
protected Builder newBuilderForType(
    org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
* Tensors
* A serialized tensor value.
*
*
* Protobuf type {@code onnx.TensorProto}
*/
public static final class Builder extends
org.nd4j.shade.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:onnx.TensorProto)
onnx.OnnxMl.TensorProtoOrBuilder {
// Descriptor for onnx.TensorProto, shared with the message class.
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return onnx.OnnxMl.internal_static_onnx_TensorProto_descriptor;
}
// Reflection table mapping field descriptors to the generated accessors.
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return onnx.OnnxMl.internal_static_onnx_TensorProto_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          onnx.OnnxMl.TensorProto.class, onnx.OnnxMl.TensorProto.Builder.class);
}
// Construct using onnx.OnnxMl.TensorProto.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}
// Parent-aware constructor used when this builder backs a nested field.
private Builder(
    org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}
// Eagerly creates nested-field builders only when the runtime requests it
// (alwaysUseFieldBuilders is a protobuf debugging/consistency flag).
private void maybeForceBuilderInitialization() {
  if (org.nd4j.shade.protobuf.GeneratedMessageV3
          .alwaysUseFieldBuilders) {
    getExternalDataFieldBuilder();
  }
}
// Resets every field to its proto3 default and clears the bitField0_ mutability
// flags that track which repeated-field buffers this builder owns.
@java.lang.Override
public Builder clear() {
  super.clear();
  dims_ = emptyLongList();
  bitField0_ = (bitField0_ & ~0x00000001);
  dataType_ = 0;
  if (segmentBuilder_ == null) {
    segment_ = null;
  } else {
    // Drop both the cached message and its builder so neither leaks stale state.
    segment_ = null;
    segmentBuilder_ = null;
  }
  floatData_ = emptyFloatList();
  bitField0_ = (bitField0_ & ~0x00000002);
  int32Data_ = emptyIntList();
  bitField0_ = (bitField0_ & ~0x00000004);
  stringData_ = java.util.Collections.emptyList();
  bitField0_ = (bitField0_ & ~0x00000008);
  int64Data_ = emptyLongList();
  bitField0_ = (bitField0_ & ~0x00000010);
  name_ = "";
  docString_ = "";
  rawData_ = org.nd4j.shade.protobuf.ByteString.EMPTY;
  if (externalDataBuilder_ == null) {
    externalData_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000020);
  } else {
    externalDataBuilder_.clear();
  }
  dataLocation_ = 0;
  doubleData_ = emptyDoubleList();
  bitField0_ = (bitField0_ & ~0x00000040);
  uint64Data_ = emptyLongList();
  bitField0_ = (bitField0_ & ~0x00000080);
  return this;
}
@java.lang.Override
public org.nd4j.shade.protobuf.Descriptors.Descriptor
    getDescriptorForType() {
  return onnx.OnnxMl.internal_static_onnx_TensorProto_descriptor;
}
// The immutable singleton all-defaults TensorProto.
@java.lang.Override
public onnx.OnnxMl.TensorProto getDefaultInstanceForType() {
  return onnx.OnnxMl.TensorProto.getDefaultInstance();
}
// Builds and verifies the message; proto3 has no required fields so the
// isInitialized() check cannot fail here, but the guard matches the
// Message.Builder contract.
@java.lang.Override
public onnx.OnnxMl.TensorProto build() {
  onnx.OnnxMl.TensorProto result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
// Transfers field state into a new message. For each repeated field the
// bitField0_ bit says "this builder owns a mutable buffer"; when set, the
// buffer is frozen (makeImmutable / unmodifiableList) and the bit cleared so
// the message and any future builder mutations can share it safely.
@java.lang.Override
public onnx.OnnxMl.TensorProto buildPartial() {
  onnx.OnnxMl.TensorProto result = new onnx.OnnxMl.TensorProto(this);
  int from_bitField0_ = bitField0_;
  if (((bitField0_ & 0x00000001) != 0)) {
    dims_.makeImmutable();
    bitField0_ = (bitField0_ & ~0x00000001);
  }
  result.dims_ = dims_;
  result.dataType_ = dataType_;
  if (segmentBuilder_ == null) {
    result.segment_ = segment_;
  } else {
    result.segment_ = segmentBuilder_.build();
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    floatData_.makeImmutable();
    bitField0_ = (bitField0_ & ~0x00000002);
  }
  result.floatData_ = floatData_;
  if (((bitField0_ & 0x00000004) != 0)) {
    int32Data_.makeImmutable();
    bitField0_ = (bitField0_ & ~0x00000004);
  }
  result.int32Data_ = int32Data_;
  if (((bitField0_ & 0x00000008) != 0)) {
    stringData_ = java.util.Collections.unmodifiableList(stringData_);
    bitField0_ = (bitField0_ & ~0x00000008);
  }
  result.stringData_ = stringData_;
  if (((bitField0_ & 0x00000010) != 0)) {
    int64Data_.makeImmutable();
    bitField0_ = (bitField0_ & ~0x00000010);
  }
  result.int64Data_ = int64Data_;
  result.name_ = name_;
  result.docString_ = docString_;
  result.rawData_ = rawData_;
  if (externalDataBuilder_ == null) {
    if (((bitField0_ & 0x00000020) != 0)) {
      externalData_ = java.util.Collections.unmodifiableList(externalData_);
      bitField0_ = (bitField0_ & ~0x00000020);
    }
    result.externalData_ = externalData_;
  } else {
    result.externalData_ = externalDataBuilder_.build();
  }
  result.dataLocation_ = dataLocation_;
  if (((bitField0_ & 0x00000040) != 0)) {
    doubleData_.makeImmutable();
    bitField0_ = (bitField0_ & ~0x00000040);
  }
  result.doubleData_ = doubleData_;
  if (((bitField0_ & 0x00000080) != 0)) {
    uint64Data_.makeImmutable();
    bitField0_ = (bitField0_ & ~0x00000080);
  }
  result.uint64Data_ = uint64Data_;
  onBuilt();
  return result;
}
// Reflective/field-descriptor overrides: all delegate straight to
// GeneratedMessageV3.Builder. They are regenerated here (rather than
// inherited silently) so the concrete Builder return type is preserved.
@java.lang.Override
public Builder clone() {
  return super.clone();
}
@java.lang.Override
public Builder setField(
    org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
    java.lang.Object value) {
  return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
    org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
    org.nd4j.shade.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
    org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
    int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
    org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
    java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
// Dispatches to the type-specific merge when possible; otherwise falls back
// to the reflective field-by-field merge in the superclass.
@java.lang.Override
public Builder mergeFrom(org.nd4j.shade.protobuf.Message other) {
  if (other instanceof onnx.OnnxMl.TensorProto) {
    return mergeFrom((onnx.OnnxMl.TensorProto)other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
// Field-by-field merge from another TensorProto, following proto3 semantics:
// singular scalar/string/bytes fields are copied only when set to a
// non-default value, repeated fields are concatenated, and the nested
// segment message is merged recursively.
public Builder mergeFrom(onnx.OnnxMl.TensorProto other) {
if (other == onnx.OnnxMl.TensorProto.getDefaultInstance()) return this;
// Repeated primitive fields: if our list is still empty, share the other
// message's list directly and clear the local "mutable" bit (copy-on-write);
// otherwise make our list mutable and append.
if (!other.dims_.isEmpty()) {
if (dims_.isEmpty()) {
dims_ = other.dims_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureDimsIsMutable();
dims_.addAll(other.dims_);
}
onChanged();
}
if (other.getDataType() != 0) {
setDataType(other.getDataType());
}
if (other.hasSegment()) {
mergeSegment(other.getSegment());
}
if (!other.floatData_.isEmpty()) {
if (floatData_.isEmpty()) {
floatData_ = other.floatData_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureFloatDataIsMutable();
floatData_.addAll(other.floatData_);
}
onChanged();
}
if (!other.int32Data_.isEmpty()) {
if (int32Data_.isEmpty()) {
int32Data_ = other.int32Data_;
bitField0_ = (bitField0_ & ~0x00000004);
} else {
ensureInt32DataIsMutable();
int32Data_.addAll(other.int32Data_);
}
onChanged();
}
if (!other.stringData_.isEmpty()) {
if (stringData_.isEmpty()) {
stringData_ = other.stringData_;
bitField0_ = (bitField0_ & ~0x00000008);
} else {
ensureStringDataIsMutable();
stringData_.addAll(other.stringData_);
}
onChanged();
}
if (!other.int64Data_.isEmpty()) {
if (int64Data_.isEmpty()) {
int64Data_ = other.int64Data_;
bitField0_ = (bitField0_ & ~0x00000010);
} else {
ensureInt64DataIsMutable();
int64Data_.addAll(other.int64Data_);
}
onChanged();
}
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
if (!other.getDocString().isEmpty()) {
docString_ = other.docString_;
onChanged();
}
if (other.getRawData() != org.nd4j.shade.protobuf.ByteString.EMPTY) {
setRawData(other.getRawData());
}
// Repeated message field external_data: two code paths depending on whether
// this builder is operating on plain lists or on a RepeatedFieldBuilderV3.
if (externalDataBuilder_ == null) {
if (!other.externalData_.isEmpty()) {
if (externalData_.isEmpty()) {
externalData_ = other.externalData_;
bitField0_ = (bitField0_ & ~0x00000020);
} else {
ensureExternalDataIsMutable();
externalData_.addAll(other.externalData_);
}
onChanged();
}
} else {
if (!other.externalData_.isEmpty()) {
if (externalDataBuilder_.isEmpty()) {
// Builder holds no elements: discard it, adopt the other message's list,
// and (when the runtime forces field builders) re-create the builder.
externalDataBuilder_.dispose();
externalDataBuilder_ = null;
externalData_ = other.externalData_;
bitField0_ = (bitField0_ & ~0x00000020);
externalDataBuilder_ =
org.nd4j.shade.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getExternalDataFieldBuilder() : null;
} else {
externalDataBuilder_.addAllMessages(other.externalData_);
}
}
}
if (other.dataLocation_ != 0) {
setDataLocationValue(other.getDataLocationValue());
}
if (!other.doubleData_.isEmpty()) {
if (doubleData_.isEmpty()) {
doubleData_ = other.doubleData_;
bitField0_ = (bitField0_ & ~0x00000040);
} else {
ensureDoubleDataIsMutable();
doubleData_.addAll(other.doubleData_);
}
onChanged();
}
if (!other.uint64Data_.isEmpty()) {
if (uint64Data_.isEmpty()) {
uint64Data_ = other.uint64Data_;
bitField0_ = (bitField0_ & ~0x00000080);
} else {
ensureUint64DataIsMutable();
uint64Data_.addAll(other.uint64Data_);
}
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
// TensorProto has no required fields, so a builder is always initialized.
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parse a TensorProto from a wire-format stream and merge it into this
// builder. On a parse error, any partially-parsed message is still merged
// (in the finally block) before the exception is rethrown.
@java.lang.Override
public Builder mergeFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
onnx.OnnxMl.TensorProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (onnx.OnnxMl.TensorProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Bit mask tracking which repeated fields currently hold a locally-mutable
// list (bit set) versus a shared/immutable one (bit clear).
private int bitField0_;
// Backing storage for `repeated int64 dims = 1`; starts as the shared
// immutable empty list.
private org.nd4j.shade.protobuf.Internal.LongList dims_ = emptyLongList();
// Copy-on-write: replace dims_ with a mutable copy the first time it is
// modified, and record that via bit 0x00000001.
private void ensureDimsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
dims_ = mutableCopy(dims_);
bitField0_ |= 0x00000001;
}
}
/**
 * <pre>
 * The shape of the tensor.
 * </pre>
 *
 * <code>repeated int64 dims = 1;</code>
 * @return A list containing the dims.
 */
public java.util.List<java.lang.Long>
    getDimsList() {
  // Only wrap in an unmodifiable view while the backing list is mutable
  // (bit 0x00000001 set); the default empty list is already immutable.
  return ((bitField0_ & 0x00000001) != 0) ?
           java.util.Collections.unmodifiableList(dims_) : dims_;
}
/**
 *
 * The shape of the tensor.
 *
 *
 * repeated int64 dims = 1;
 * @return The count of dims.
 */
public int getDimsCount() {
return dims_.size();
}
/**
 *
 * The shape of the tensor.
 *
 *
 * repeated int64 dims = 1;
 * @param index The index of the element to return.
 * @return The dims at the given index.
 */
public long getDims(int index) {
return dims_.getLong(index);
}
/**
 *
 * The shape of the tensor.
 *
 *
 * repeated int64 dims = 1;
 * @param index The index to set the value at.
 * @param value The dims to set.
 * @return This builder for chaining.
 */
public Builder setDims(
int index, long value) {
// Copy-on-write before any mutation; onChanged() notifies parent builders.
ensureDimsIsMutable();
dims_.setLong(index, value);
onChanged();
return this;
}
/**
 *
 * The shape of the tensor.
 *
 *
 * repeated int64 dims = 1;
 * @param value The dims to add.
 * @return This builder for chaining.
 */
public Builder addDims(long value) {
ensureDimsIsMutable();
dims_.addLong(value);
onChanged();
return this;
}
/**
 * <pre>
 * The shape of the tensor.
 * </pre>
 *
 * <code>repeated int64 dims = 1;</code>
 * @param values The dims to add.
 * @return This builder for chaining.
 */
public Builder addAllDims(
    java.lang.Iterable<? extends java.lang.Long> values) {
  // Copy-on-write before appending; the runtime helper validates non-null
  // elements while adding.
  ensureDimsIsMutable();
  org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
      values, dims_);
  onChanged();
  return this;
}
/**
 *
 * The shape of the tensor.
 *
 *
 * repeated int64 dims = 1;
 * @return This builder for chaining.
 */
public Builder clearDims() {
// Reset to the shared immutable empty list and clear the mutable bit.
dims_ = emptyLongList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
// Backing field for `int32 data_type = 2` (a TensorProto.DataType value
// stored as a raw int, per proto3 open-enum conventions).
private int dataType_ ;
/**
 *
 * The data type of the tensor.
 * This field MUST have a valid TensorProto.DataType value
 *
 *
 * int32 data_type = 2;
 * @return The dataType.
 */
@java.lang.Override
public int getDataType() {
return dataType_;
}
/**
 *
 * The data type of the tensor.
 * This field MUST have a valid TensorProto.DataType value
 *
 *
 * int32 data_type = 2;
 * @param value The dataType to set.
 * @return This builder for chaining.
 */
public Builder setDataType(int value) {
dataType_ = value;
onChanged();
return this;
}
/**
 *
 * The data type of the tensor.
 * This field MUST have a valid TensorProto.DataType value
 *
 *
 * int32 data_type = 2;
 * @return This builder for chaining.
 */
public Builder clearDataType() {
dataType_ = 0;
onChanged();
return this;
}
// Singular message field `segment = 3`. Exactly one of segment_ /
// segmentBuilder_ is in use at a time: plain-field mode until a builder is
// requested, builder mode afterwards.
private onnx.OnnxMl.TensorProto.Segment segment_;
private org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.TensorProto.Segment, onnx.OnnxMl.TensorProto.Segment.Builder, onnx.OnnxMl.TensorProto.SegmentOrBuilder> segmentBuilder_;
/**
 * .onnx.TensorProto.Segment segment = 3;
 * @return Whether the segment field is set.
 */
public boolean hasSegment() {
return segmentBuilder_ != null || segment_ != null;
}
/**
 * .onnx.TensorProto.Segment segment = 3;
 * @return The segment.
 */
public onnx.OnnxMl.TensorProto.Segment getSegment() {
if (segmentBuilder_ == null) {
// Never return null: fall back to the default instance.
return segment_ == null ? onnx.OnnxMl.TensorProto.Segment.getDefaultInstance() : segment_;
} else {
return segmentBuilder_.getMessage();
}
}
/**
 * .onnx.TensorProto.Segment segment = 3;
 */
public Builder setSegment(onnx.OnnxMl.TensorProto.Segment value) {
if (segmentBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
segment_ = value;
onChanged();
} else {
segmentBuilder_.setMessage(value);
}
return this;
}
/**
 * .onnx.TensorProto.Segment segment = 3;
 */
public Builder setSegment(
onnx.OnnxMl.TensorProto.Segment.Builder builderForValue) {
if (segmentBuilder_ == null) {
segment_ = builderForValue.build();
onChanged();
} else {
segmentBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
 * .onnx.TensorProto.Segment segment = 3;
 */
public Builder mergeSegment(onnx.OnnxMl.TensorProto.Segment value) {
if (segmentBuilder_ == null) {
if (segment_ != null) {
// Merge into the existing value rather than replacing it outright.
segment_ =
onnx.OnnxMl.TensorProto.Segment.newBuilder(segment_).mergeFrom(value).buildPartial();
} else {
segment_ = value;
}
onChanged();
} else {
segmentBuilder_.mergeFrom(value);
}
return this;
}
/**
 * .onnx.TensorProto.Segment segment = 3;
 */
public Builder clearSegment() {
if (segmentBuilder_ == null) {
segment_ = null;
onChanged();
} else {
segment_ = null;
segmentBuilder_ = null;
}
return this;
}
/**
 * .onnx.TensorProto.Segment segment = 3;
 */
public onnx.OnnxMl.TensorProto.Segment.Builder getSegmentBuilder() {
onChanged();
return getSegmentFieldBuilder().getBuilder();
}
/**
 * .onnx.TensorProto.Segment segment = 3;
 */
public onnx.OnnxMl.TensorProto.SegmentOrBuilder getSegmentOrBuilder() {
if (segmentBuilder_ != null) {
return segmentBuilder_.getMessageOrBuilder();
} else {
return segment_ == null ?
onnx.OnnxMl.TensorProto.Segment.getDefaultInstance() : segment_;
}
}
/**
 * .onnx.TensorProto.Segment segment = 3;
 */
// Lazily create the single-field builder; from then on segment_ is owned by
// the builder and the plain field is nulled out.
private org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.TensorProto.Segment, onnx.OnnxMl.TensorProto.Segment.Builder, onnx.OnnxMl.TensorProto.SegmentOrBuilder>
getSegmentFieldBuilder() {
if (segmentBuilder_ == null) {
segmentBuilder_ = new org.nd4j.shade.protobuf.SingleFieldBuilderV3<
onnx.OnnxMl.TensorProto.Segment, onnx.OnnxMl.TensorProto.Segment.Builder, onnx.OnnxMl.TensorProto.SegmentOrBuilder>(
getSegment(),
getParentForChildren(),
isClean());
segment_ = null;
}
return segmentBuilder_;
}
// Backing storage for `repeated float float_data = 4`; copy-on-write guarded
// by bit 0x00000002 of bitField0_.
private org.nd4j.shade.protobuf.Internal.FloatList floatData_ = emptyFloatList();
private void ensureFloatDataIsMutable() {
if (!((bitField0_ & 0x00000002) != 0)) {
floatData_ = mutableCopy(floatData_);
bitField0_ |= 0x00000002;
}
}
/**
 * <pre>
 * For float and complex64 values
 * Complex64 tensors are encoded as a single array of floats,
 * with the real components appearing in odd numbered positions,
 * and the corresponding imaginary component appearing in the
 * subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
 * is encoded as [1.0, 2.0 ,3.0 ,4.0]
 * When this field is present, the data_type field MUST be FLOAT or COMPLEX64.
 * </pre>
 *
 * <code>repeated float float_data = 4 [packed = true];</code>
 * @return A list containing the floatData.
 */
public java.util.List<java.lang.Float>
    getFloatDataList() {
  // Only wrap in an unmodifiable view while the backing list is mutable
  // (bit 0x00000002 set); the default empty list is already immutable.
  return ((bitField0_ & 0x00000002) != 0) ?
           java.util.Collections.unmodifiableList(floatData_) : floatData_;
}
/**
 *
 * For float and complex64 values
 * Complex64 tensors are encoded as a single array of floats,
 * with the real components appearing in odd numbered positions,
 * and the corresponding imaginary component appearing in the
 * subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
 * is encoded as [1.0, 2.0 ,3.0 ,4.0]
 * When this field is present, the data_type field MUST be FLOAT or COMPLEX64.
 *
 *
 * repeated float float_data = 4 [packed = true];
 * @return The count of floatData.
 */
public int getFloatDataCount() {
return floatData_.size();
}
/**
 *
 * For float and complex64 values
 * Complex64 tensors are encoded as a single array of floats,
 * with the real components appearing in odd numbered positions,
 * and the corresponding imaginary component appearing in the
 * subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
 * is encoded as [1.0, 2.0 ,3.0 ,4.0]
 * When this field is present, the data_type field MUST be FLOAT or COMPLEX64.
 *
 *
 * repeated float float_data = 4 [packed = true];
 * @param index The index of the element to return.
 * @return The floatData at the given index.
 */
public float getFloatData(int index) {
return floatData_.getFloat(index);
}
/**
 *
 * For float and complex64 values
 * Complex64 tensors are encoded as a single array of floats,
 * with the real components appearing in odd numbered positions,
 * and the corresponding imaginary component appearing in the
 * subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
 * is encoded as [1.0, 2.0 ,3.0 ,4.0]
 * When this field is present, the data_type field MUST be FLOAT or COMPLEX64.
 *
 *
 * repeated float float_data = 4 [packed = true];
 * @param index The index to set the value at.
 * @param value The floatData to set.
 * @return This builder for chaining.
 */
public Builder setFloatData(
int index, float value) {
// Copy-on-write before any mutation; onChanged() notifies parent builders.
ensureFloatDataIsMutable();
floatData_.setFloat(index, value);
onChanged();
return this;
}
/**
 *
 * For float and complex64 values
 * Complex64 tensors are encoded as a single array of floats,
 * with the real components appearing in odd numbered positions,
 * and the corresponding imaginary component appearing in the
 * subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
 * is encoded as [1.0, 2.0 ,3.0 ,4.0]
 * When this field is present, the data_type field MUST be FLOAT or COMPLEX64.
 *
 *
 * repeated float float_data = 4 [packed = true];
 * @param value The floatData to add.
 * @return This builder for chaining.
 */
public Builder addFloatData(float value) {
ensureFloatDataIsMutable();
floatData_.addFloat(value);
onChanged();
return this;
}
/**
 * <pre>
 * For float and complex64 values
 * Complex64 tensors are encoded as a single array of floats,
 * with the real components appearing in odd numbered positions,
 * and the corresponding imaginary component appearing in the
 * subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
 * is encoded as [1.0, 2.0 ,3.0 ,4.0]
 * When this field is present, the data_type field MUST be FLOAT or COMPLEX64.
 * </pre>
 *
 * <code>repeated float float_data = 4 [packed = true];</code>
 * @param values The floatData to add.
 * @return This builder for chaining.
 */
public Builder addAllFloatData(
    java.lang.Iterable<? extends java.lang.Float> values) {
  // Copy-on-write before appending; the runtime helper validates non-null
  // elements while adding.
  ensureFloatDataIsMutable();
  org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
      values, floatData_);
  onChanged();
  return this;
}
/**
 *
 * For float and complex64 values
 * Complex64 tensors are encoded as a single array of floats,
 * with the real components appearing in odd numbered positions,
 * and the corresponding imaginary component appearing in the
 * subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
 * is encoded as [1.0, 2.0 ,3.0 ,4.0]
 * When this field is present, the data_type field MUST be FLOAT or COMPLEX64.
 *
 *
 * repeated float float_data = 4 [packed = true];
 * @return This builder for chaining.
 */
public Builder clearFloatData() {
// Reset to the shared immutable empty list and clear the mutable bit.
floatData_ = emptyFloatList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
// Backing storage for `repeated int32 int32_data = 5`; copy-on-write guarded
// by bit 0x00000004 of bitField0_.
private org.nd4j.shade.protobuf.Internal.IntList int32Data_ = emptyIntList();
private void ensureInt32DataIsMutable() {
if (!((bitField0_ & 0x00000004) != 0)) {
int32Data_ = mutableCopy(int32Data_);
bitField0_ |= 0x00000004;
}
}
/**
 * <pre>
 * For int32, uint8, int8, uint16, int16, bool, and float16 values
 * float16 values must be bit-wise converted to an uint16_t prior
 * to writing to the buffer.
 * When this field is present, the data_type field MUST be
 * INT32, INT16, INT8, UINT16, UINT8, BOOL, or FLOAT16
 * </pre>
 *
 * <code>repeated int32 int32_data = 5 [packed = true];</code>
 * @return A list containing the int32Data.
 */
public java.util.List<java.lang.Integer>
    getInt32DataList() {
  // Only wrap in an unmodifiable view while the backing list is mutable
  // (bit 0x00000004 set); the default empty list is already immutable.
  return ((bitField0_ & 0x00000004) != 0) ?
           java.util.Collections.unmodifiableList(int32Data_) : int32Data_;
}
/**
 *
 * For int32, uint8, int8, uint16, int16, bool, and float16 values
 * float16 values must be bit-wise converted to an uint16_t prior
 * to writing to the buffer.
 * When this field is present, the data_type field MUST be
 * INT32, INT16, INT8, UINT16, UINT8, BOOL, or FLOAT16
 *
 *
 * repeated int32 int32_data = 5 [packed = true];
 * @return The count of int32Data.
 */
public int getInt32DataCount() {
return int32Data_.size();
}
/**
 *
 * For int32, uint8, int8, uint16, int16, bool, and float16 values
 * float16 values must be bit-wise converted to an uint16_t prior
 * to writing to the buffer.
 * When this field is present, the data_type field MUST be
 * INT32, INT16, INT8, UINT16, UINT8, BOOL, or FLOAT16
 *
 *
 * repeated int32 int32_data = 5 [packed = true];
 * @param index The index of the element to return.
 * @return The int32Data at the given index.
 */
public int getInt32Data(int index) {
return int32Data_.getInt(index);
}
/**
 *
 * For int32, uint8, int8, uint16, int16, bool, and float16 values
 * float16 values must be bit-wise converted to an uint16_t prior
 * to writing to the buffer.
 * When this field is present, the data_type field MUST be
 * INT32, INT16, INT8, UINT16, UINT8, BOOL, or FLOAT16
 *
 *
 * repeated int32 int32_data = 5 [packed = true];
 * @param index The index to set the value at.
 * @param value The int32Data to set.
 * @return This builder for chaining.
 */
public Builder setInt32Data(
int index, int value) {
// Copy-on-write before any mutation; onChanged() notifies parent builders.
ensureInt32DataIsMutable();
int32Data_.setInt(index, value);
onChanged();
return this;
}
/**
 *
 * For int32, uint8, int8, uint16, int16, bool, and float16 values
 * float16 values must be bit-wise converted to an uint16_t prior
 * to writing to the buffer.
 * When this field is present, the data_type field MUST be
 * INT32, INT16, INT8, UINT16, UINT8, BOOL, or FLOAT16
 *
 *
 * repeated int32 int32_data = 5 [packed = true];
 * @param value The int32Data to add.
 * @return This builder for chaining.
 */
public Builder addInt32Data(int value) {
ensureInt32DataIsMutable();
int32Data_.addInt(value);
onChanged();
return this;
}
/**
 * <pre>
 * For int32, uint8, int8, uint16, int16, bool, and float16 values
 * float16 values must be bit-wise converted to an uint16_t prior
 * to writing to the buffer.
 * When this field is present, the data_type field MUST be
 * INT32, INT16, INT8, UINT16, UINT8, BOOL, or FLOAT16
 * </pre>
 *
 * <code>repeated int32 int32_data = 5 [packed = true];</code>
 * @param values The int32Data to add.
 * @return This builder for chaining.
 */
public Builder addAllInt32Data(
    java.lang.Iterable<? extends java.lang.Integer> values) {
  // Copy-on-write before appending; the runtime helper validates non-null
  // elements while adding.
  ensureInt32DataIsMutable();
  org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
      values, int32Data_);
  onChanged();
  return this;
}
/**
 *
 * For int32, uint8, int8, uint16, int16, bool, and float16 values
 * float16 values must be bit-wise converted to an uint16_t prior
 * to writing to the buffer.
 * When this field is present, the data_type field MUST be
 * INT32, INT16, INT8, UINT16, UINT8, BOOL, or FLOAT16
 *
 *
 * repeated int32 int32_data = 5 [packed = true];
 * @return This builder for chaining.
 */
public Builder clearInt32Data() {
// Reset to the shared immutable empty list and clear the mutable bit.
int32Data_ = emptyIntList();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
// Backing storage for `repeated bytes string_data = 6`; each element is a
// ByteString. Copy-on-write guarded by bit 0x00000008 of bitField0_.
private java.util.List<org.nd4j.shade.protobuf.ByteString> stringData_ = java.util.Collections.emptyList();
private void ensureStringDataIsMutable() {
  if (!((bitField0_ & 0x00000008) != 0)) {
    stringData_ = new java.util.ArrayList<org.nd4j.shade.protobuf.ByteString>(stringData_);
    bitField0_ |= 0x00000008;
  }
}
/**
 * <pre>
 * For strings.
 * Each element of string_data is a UTF-8 encoded Unicode
 * string. No trailing null, no leading BOM. The protobuf "string"
 * scalar type is not used to match ML community conventions.
 * When this field is present, the data_type field MUST be STRING
 * </pre>
 *
 * <code>repeated bytes string_data = 6;</code>
 * @return A list containing the stringData.
 */
public java.util.List<org.nd4j.shade.protobuf.ByteString>
    getStringDataList() {
  // Only wrap in an unmodifiable view while the backing list is mutable
  // (bit 0x00000008 set); the default Collections.emptyList() is immutable.
  return ((bitField0_ & 0x00000008) != 0) ?
           java.util.Collections.unmodifiableList(stringData_) : stringData_;
}
/**
*