// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: tensorflow/core/framework/api_def.proto
package org.tensorflow.framework;
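/*
 * Illustrative sketch (not part of the generated file): the class comment below
 * notes that API overrides live in an "api_def.pbtxt" file containing a
 * text-format ApiDefs message. Assuming a hypothetical op named "MyOp", such an
 * override might look like the following; the endpoint name and summary are
 * made up for illustration.
 *
 *   op {
 *     graph_op_name: "MyOp"
 *     visibility: VISIBLE
 *     endpoint { name: "math.my_op" }
 *     summary: "One-line summary used for the generated API documentation."
 *   }
 */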
/**
*
* Used to specify and override the default API & behavior in the
* generated code for client languages, from what you would get from
* the OpDef alone. There will be a set of ApiDefs that are common
* to all client languages, and another set per client language.
* The per-client-language ApiDefs will inherit values from the
* common ApiDefs, which they can either replace or modify.
* We separate the API definition from the OpDef so we can evolve the
* API while remaining backwards compatible when interpreting old
* graphs. Overrides go in an "api_def.pbtxt" file with a text-format
* ApiDefs message.
* WARNING: Be *very* careful changing the API for any existing op --
* you can change the semantics of existing code. These changes may
* need to wait until a major release of TensorFlow to avoid breaking
* our compatibility promises.
*
*
* Protobuf type {@code tensorflow.ApiDef}
*/
public final class ApiDef extends
org.nd4j.shade.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:tensorflow.ApiDef)
ApiDefOrBuilder {
private static final long serialVersionUID = 0L;
// Use ApiDef.newBuilder() to construct.
private ApiDef(org.nd4j.shade.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ApiDef() {
graphOpName_ = "";
visibility_ = 0;
endpoint_ = java.util.Collections.emptyList();
inArg_ = java.util.Collections.emptyList();
outArg_ = java.util.Collections.emptyList();
argOrder_ = org.nd4j.shade.protobuf.LazyStringArrayList.EMPTY;
attr_ = java.util.Collections.emptyList();
summary_ = "";
description_ = "";
descriptionPrefix_ = "";
descriptionSuffix_ = "";
}
@java.lang.Override
public final org.nd4j.shade.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private ApiDef(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.nd4j.shade.protobuf.UnknownFieldSet.Builder unknownFields =
org.nd4j.shade.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownFieldProto3(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
java.lang.String s = input.readStringRequireUtf8();
graphOpName_ = s;
break;
}
case 16: {
int rawValue = input.readEnum();
visibility_ = rawValue;
break;
}
case 26: {
if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
endpoint_ = new java.util.ArrayList<org.tensorflow.framework.ApiDef.Endpoint>();
mutable_bitField0_ |= 0x00000004;
}
endpoint_.add(
input.readMessage(org.tensorflow.framework.ApiDef.Endpoint.parser(), extensionRegistry));
break;
}
case 34: {
if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
inArg_ = new java.util.ArrayList<org.tensorflow.framework.ApiDef.Arg>();
mutable_bitField0_ |= 0x00000008;
}
inArg_.add(
input.readMessage(org.tensorflow.framework.ApiDef.Arg.parser(), extensionRegistry));
break;
}
case 42: {
if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
outArg_ = new java.util.ArrayList<org.tensorflow.framework.ApiDef.Arg>();
mutable_bitField0_ |= 0x00000010;
}
outArg_.add(
input.readMessage(org.tensorflow.framework.ApiDef.Arg.parser(), extensionRegistry));
break;
}
case 50: {
if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
attr_ = new java.util.ArrayList<org.tensorflow.framework.ApiDef.Attr>();
mutable_bitField0_ |= 0x00000040;
}
attr_.add(
input.readMessage(org.tensorflow.framework.ApiDef.Attr.parser(), extensionRegistry));
break;
}
case 58: {
java.lang.String s = input.readStringRequireUtf8();
summary_ = s;
break;
}
case 66: {
java.lang.String s = input.readStringRequireUtf8();
description_ = s;
break;
}
case 74: {
java.lang.String s = input.readStringRequireUtf8();
descriptionPrefix_ = s;
break;
}
case 82: {
java.lang.String s = input.readStringRequireUtf8();
descriptionSuffix_ = s;
break;
}
case 90: {
java.lang.String s = input.readStringRequireUtf8();
if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
argOrder_ = new org.nd4j.shade.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000020;
}
argOrder_.add(s);
break;
}
}
}
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.nd4j.shade.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
endpoint_ = java.util.Collections.unmodifiableList(endpoint_);
}
if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
inArg_ = java.util.Collections.unmodifiableList(inArg_);
}
if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
outArg_ = java.util.Collections.unmodifiableList(outArg_);
}
if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
attr_ = java.util.Collections.unmodifiableList(attr_);
}
if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
argOrder_ = argOrder_.getUnmodifiableView();
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.tensorflow.framework.ApiDefProtos.internal_static_tensorflow_ApiDef_descriptor;
}
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.tensorflow.framework.ApiDefProtos.internal_static_tensorflow_ApiDef_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.tensorflow.framework.ApiDef.class, org.tensorflow.framework.ApiDef.Builder.class);
}
/**
* Protobuf enum {@code tensorflow.ApiDef.Visibility}
*/
public enum Visibility
implements org.nd4j.shade.protobuf.ProtocolMessageEnum {
/**
*
* Normally this is "VISIBLE" unless you are inheriting a
* different value from another ApiDef.
*
*
* DEFAULT_VISIBILITY = 0;
*/
DEFAULT_VISIBILITY(0),
/**
*
* Publicly visible in the API.
*
*
* VISIBLE = 1;
*/
VISIBLE(1),
/**
*
* Do not include this op in the generated API. If visibility is
* set to 'SKIP', other fields are ignored for this op.
*
*
* SKIP = 2;
*/
SKIP(2),
/**
*
* Hide this op by putting it into an internal namespace (or whatever
* is appropriate in the target language).
*
*
* HIDDEN = 3;
*/
HIDDEN(3),
UNRECOGNIZED(-1),
;
/**
* DEFAULT_VISIBILITY = 0;
*/
public static final int DEFAULT_VISIBILITY_VALUE = 0;
/**
* VISIBLE = 1;
*/
public static final int VISIBLE_VALUE = 1;
/**
* SKIP = 2;
*/
public static final int SKIP_VALUE = 2;
/**
* HIDDEN = 3;
*/
public static final int HIDDEN_VALUE = 3;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static Visibility valueOf(int value) {
return forNumber(value);
}
public static Visibility forNumber(int value) {
switch (value) {
case 0: return DEFAULT_VISIBILITY;
case 1: return VISIBLE;
case 2: return SKIP;
case 3: return HIDDEN;
default: return null;
}
}
public static org.nd4j.shade.protobuf.Internal.EnumLiteMap<Visibility>
internalGetValueMap() {
return internalValueMap;
}
private static final org.nd4j.shade.protobuf.Internal.EnumLiteMap<
Visibility> internalValueMap =
new org.nd4j.shade.protobuf.Internal.EnumLiteMap<Visibility>() {
public Visibility findValueByNumber(int number) {
return Visibility.forNumber(number);
}
};
public final org.nd4j.shade.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final org.nd4j.shade.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final org.nd4j.shade.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.tensorflow.framework.ApiDef.getDescriptor().getEnumTypes().get(0);
}
private static final Visibility[] VALUES = values();
public static Visibility valueOf(
org.nd4j.shade.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private Visibility(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:tensorflow.ApiDef.Visibility)
}
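/*
 * Usage sketch (illustrative, not part of the generated file): a raw visibility
 * value read from the wire can be mapped back to the enum with forNumber(int),
 * which returns null for numbers not defined above; valueOf(int) is deprecated
 * in favor of forNumber(int). The variable names are made up for illustration.
 *
 *   int rawValue = 2; // e.g. the stored visibility_ field
 *   ApiDef.Visibility v = ApiDef.Visibility.forNumber(rawValue);
 *   ApiDef.Visibility visibility = (v == null) ? ApiDef.Visibility.UNRECOGNIZED : v;
 *   // visibility == ApiDef.Visibility.SKIP for rawValue == 2
 */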
public interface EndpointOrBuilder extends
// @@protoc_insertion_point(interface_extends:tensorflow.ApiDef.Endpoint)
org.nd4j.shade.protobuf.MessageOrBuilder {
/**
*
* Name should be either like "CamelCaseName" or
* "Package.CamelCaseName". Client-language-specific ApiDefs may
* use a snake_case convention instead of CamelCase.
*
*
* string name = 1;
*/
java.lang.String getName();
/**
* string name = 1;
*/
org.nd4j.shade.protobuf.ByteString
getNameBytes();
/**
*
* If this endpoint is deprecated, set deprecation_message to a
* message that should be logged when the endpoint is used.
* The message should indicate alternative endpoint to use, if any.
*
*
* string deprecation_message = 2;
*/
java.lang.String getDeprecationMessage();
/**
* string deprecation_message = 2;
*/
org.nd4j.shade.protobuf.ByteString
getDeprecationMessageBytes();
}
/**
*
* If you specify any endpoint, this will replace all of the
* inherited endpoints. The first endpoint should be the
* "canonical" endpoint, and should not be deprecated (unless all
* endpoints are deprecated).
*
*
* Protobuf type {@code tensorflow.ApiDef.Endpoint}
*/
public static final class Endpoint extends
org.nd4j.shade.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:tensorflow.ApiDef.Endpoint)
EndpointOrBuilder {
private static final long serialVersionUID = 0L;
// Use Endpoint.newBuilder() to construct.
private Endpoint(org.nd4j.shade.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Endpoint() {
name_ = "";
deprecationMessage_ = "";
}
@java.lang.Override
public final org.nd4j.shade.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Endpoint(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.nd4j.shade.protobuf.UnknownFieldSet.Builder unknownFields =
org.nd4j.shade.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownFieldProto3(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
java.lang.String s = input.readStringRequireUtf8();
name_ = s;
break;
}
case 18: {
java.lang.String s = input.readStringRequireUtf8();
deprecationMessage_ = s;
break;
}
}
}
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.nd4j.shade.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.tensorflow.framework.ApiDefProtos.internal_static_tensorflow_ApiDef_Endpoint_descriptor;
}
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.tensorflow.framework.ApiDefProtos.internal_static_tensorflow_ApiDef_Endpoint_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.tensorflow.framework.ApiDef.Endpoint.class, org.tensorflow.framework.ApiDef.Endpoint.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
private volatile java.lang.Object name_;
/**
*
* Name should be either like "CamelCaseName" or
* "Package.CamelCaseName". Client-language-specific ApiDefs may
* use a snake_case convention instead of CamelCase.
*
*
* string name = 1;
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
* string name = 1;
*/
public org.nd4j.shade.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
public static final int DEPRECATION_MESSAGE_FIELD_NUMBER = 2;
private volatile java.lang.Object deprecationMessage_;
/**
*
* If this endpoint is deprecated, set deprecation_message to a
* message that should be logged when the endpoint is used.
* The message should indicate alternative endpoint to use, if any.
*
*
* string deprecation_message = 2;
*/
public java.lang.String getDeprecationMessage() {
java.lang.Object ref = deprecationMessage_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
deprecationMessage_ = s;
return s;
}
}
/**
* string deprecation_message = 2;
*/
public org.nd4j.shade.protobuf.ByteString
getDeprecationMessageBytes() {
java.lang.Object ref = deprecationMessage_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
deprecationMessage_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(org.nd4j.shade.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!getNameBytes().isEmpty()) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (!getDeprecationMessageBytes().isEmpty()) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 2, deprecationMessage_);
}
unknownFields.writeTo(output);
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getNameBytes().isEmpty()) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (!getDeprecationMessageBytes().isEmpty()) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(2, deprecationMessage_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.tensorflow.framework.ApiDef.Endpoint)) {
return super.equals(obj);
}
org.tensorflow.framework.ApiDef.Endpoint other = (org.tensorflow.framework.ApiDef.Endpoint) obj;
boolean result = true;
result = result && getName()
.equals(other.getName());
result = result && getDeprecationMessage()
.equals(other.getDeprecationMessage());
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (37 * hash) + DEPRECATION_MESSAGE_FIELD_NUMBER;
hash = (53 * hash) + getDeprecationMessage().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.tensorflow.framework.ApiDef.Endpoint parseFrom(
java.nio.ByteBuffer data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.framework.ApiDef.Endpoint parseFrom(
java.nio.ByteBuffer data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.framework.ApiDef.Endpoint parseFrom(
org.nd4j.shade.protobuf.ByteString data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.framework.ApiDef.Endpoint parseFrom(
org.nd4j.shade.protobuf.ByteString data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.framework.ApiDef.Endpoint parseFrom(byte[] data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.framework.ApiDef.Endpoint parseFrom(
byte[] data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.framework.ApiDef.Endpoint parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.tensorflow.framework.ApiDef.Endpoint parseFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.framework.ApiDef.Endpoint parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.tensorflow.framework.ApiDef.Endpoint parseDelimitedFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.framework.ApiDef.Endpoint parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.tensorflow.framework.ApiDef.Endpoint parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.tensorflow.framework.ApiDef.Endpoint prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
* If you specify any endpoint, this will replace all of the
* inherited endpoints. The first endpoint should be the
* "canonical" endpoint, and should not be deprecated (unless all
* endpoints are deprecated).
*
*
* Protobuf type {@code tensorflow.ApiDef.Endpoint}
*/
public static final class Builder extends
org.nd4j.shade.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:tensorflow.ApiDef.Endpoint)
org.tensorflow.framework.ApiDef.EndpointOrBuilder {
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.tensorflow.framework.ApiDefProtos.internal_static_tensorflow_ApiDef_Endpoint_descriptor;
}
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.tensorflow.framework.ApiDefProtos.internal_static_tensorflow_ApiDef_Endpoint_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.tensorflow.framework.ApiDef.Endpoint.class, org.tensorflow.framework.ApiDef.Endpoint.Builder.class);
}
// Construct using org.tensorflow.framework.ApiDef.Endpoint.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.nd4j.shade.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
public Builder clear() {
super.clear();
name_ = "";
deprecationMessage_ = "";
return this;
}
public org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.tensorflow.framework.ApiDefProtos.internal_static_tensorflow_ApiDef_Endpoint_descriptor;
}
public org.tensorflow.framework.ApiDef.Endpoint getDefaultInstanceForType() {
return org.tensorflow.framework.ApiDef.Endpoint.getDefaultInstance();
}
public org.tensorflow.framework.ApiDef.Endpoint build() {
org.tensorflow.framework.ApiDef.Endpoint result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.tensorflow.framework.ApiDef.Endpoint buildPartial() {
org.tensorflow.framework.ApiDef.Endpoint result = new org.tensorflow.framework.ApiDef.Endpoint(this);
result.name_ = name_;
result.deprecationMessage_ = deprecationMessage_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
org.nd4j.shade.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(org.nd4j.shade.protobuf.Message other) {
if (other instanceof org.tensorflow.framework.ApiDef.Endpoint) {
return mergeFrom((org.tensorflow.framework.ApiDef.Endpoint)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.tensorflow.framework.ApiDef.Endpoint other) {
if (other == org.tensorflow.framework.ApiDef.Endpoint.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
if (!other.getDeprecationMessage().isEmpty()) {
deprecationMessage_ = other.deprecationMessage_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.tensorflow.framework.ApiDef.Endpoint parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.tensorflow.framework.ApiDef.Endpoint) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object name_ = "";
/**
*
* Name should be either like "CamelCaseName" or
* "Package.CamelCaseName". Client-language-specific ApiDefs may
* use a snake_case convention instead of CamelCase.
*
*
* string name = 1;
*/
public Builder setName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
*
* Name should be either like "CamelCaseName" or
* "Package.CamelCaseName". Client-language-specific ApiDefs may
* use a snake_case convention instead of CamelCase.
*
*
* string name = 1;
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
*
* Name should be either like "CamelCaseName" or
* "Package.CamelCaseName". Client-language-specific ApiDefs may
* use a snake_case convention instead of CamelCase.
*
*
* string name = 1;
*/
public Builder setNameBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
private java.lang.Object deprecationMessage_ = "";
/**
*
* If this endpoint is deprecated, set deprecation_message to a
* message that should be logged when the endpoint is used.
* The message should indicate alternative endpoint to use, if any.
*
*
* string deprecation_message = 2;
*/
public Builder setDeprecationMessageBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
deprecationMessage_ = value;
onChanged();
return this;
}
public final Builder setUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFieldsProto3(unknownFields);
}
public final Builder mergeUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:tensorflow.ApiDef.Endpoint)
}
// @@protoc_insertion_point(class_scope:tensorflow.ApiDef.Endpoint)
private static final org.tensorflow.framework.ApiDef.Endpoint DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.tensorflow.framework.ApiDef.Endpoint();
}
public static org.tensorflow.framework.ApiDef.Endpoint getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final org.nd4j.shade.protobuf.Parser<Endpoint>
PARSER = new org.nd4j.shade.protobuf.AbstractParser<Endpoint>() {
public Endpoint parsePartialFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return new Endpoint(input, extensionRegistry);
}
};
public static org.nd4j.shade.protobuf.Parser<Endpoint> parser() {
return PARSER;
}
@java.lang.Override
public org.nd4j.shade.protobuf.Parser<Endpoint> getParserForType() {
return PARSER;
}
public org.tensorflow.framework.ApiDef.Endpoint getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
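/*
 * Usage sketch (illustrative, not part of the generated file): building an
 * Endpoint with its Builder and round-tripping it through the wire format.
 * The endpoint name and message are made up; setDeprecationMessage is assumed
 * to be the standard generated setter for the deprecation_message field.
 *
 *   ApiDef.Endpoint endpoint = ApiDef.Endpoint.newBuilder()
 *       .setName("math.my_op")
 *       .setDeprecationMessage("Use math.my_op_v2 instead.")
 *       .build();
 *   byte[] wire = endpoint.toByteArray();
 *   ApiDef.Endpoint parsed = ApiDef.Endpoint.parseFrom(wire);
 *   // parsed.equals(endpoint) holds: equals() above compares name,
 *   // deprecation_message and unknown fields.
 */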
public interface ArgOrBuilder extends
// @@protoc_insertion_point(interface_extends:tensorflow.ApiDef.Arg)
org.nd4j.shade.protobuf.MessageOrBuilder {
/**
* string name = 1;
*/
java.lang.String getName();
/**
* string name = 1;
*/
org.nd4j.shade.protobuf.ByteString
getNameBytes();
/**
*
* Change the name used to access this arg in the API from what
* is used in the GraphDef. Note that these names in `backticks`
* will also be replaced in the summary & description fields.
*
*
* string rename_to = 2;
*/
java.lang.String getRenameTo();
/**
* string rename_to = 2;
*/
org.nd4j.shade.protobuf.ByteString
getRenameToBytes();
/**
*
* Note: this will replace any inherited arg doc. There is no
* current way of modifying arg descriptions (other than replacing
* them entirely) as can be done with op descriptions.
*
*
* string description = 3;
*/
java.lang.String getDescription();
/**
* string description = 3;
*/
org.nd4j.shade.protobuf.ByteString
getDescriptionBytes();
}
/**
* Protobuf type {@code tensorflow.ApiDef.Arg}
*/
public static final class Arg extends
org.nd4j.shade.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:tensorflow.ApiDef.Arg)
ArgOrBuilder {
private static final long serialVersionUID = 0L;
// Use Arg.newBuilder() to construct.
private Arg(org.nd4j.shade.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Arg() {
name_ = "";
renameTo_ = "";
description_ = "";
}
@java.lang.Override
public final org.nd4j.shade.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Arg(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.nd4j.shade.protobuf.UnknownFieldSet.Builder unknownFields =
org.nd4j.shade.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownFieldProto3(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
java.lang.String s = input.readStringRequireUtf8();
name_ = s;
break;
}
case 18: {
java.lang.String s = input.readStringRequireUtf8();
renameTo_ = s;
break;
}
case 26: {
java.lang.String s = input.readStringRequireUtf8();
description_ = s;
break;
}
}
}
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.nd4j.shade.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.tensorflow.framework.ApiDefProtos.internal_static_tensorflow_ApiDef_Arg_descriptor;
}
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.tensorflow.framework.ApiDefProtos.internal_static_tensorflow_ApiDef_Arg_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.tensorflow.framework.ApiDef.Arg.class, org.tensorflow.framework.ApiDef.Arg.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
private volatile java.lang.Object name_;
/**
* string name = 1;
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
* string name = 1;
*/
public org.nd4j.shade.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
public static final int RENAME_TO_FIELD_NUMBER = 2;
private volatile java.lang.Object renameTo_;
/**
*
* Change the name used to access this arg in the API from what
* is used in the GraphDef. Note that these names in `backticks`
* will also be replaced in the summary & description fields.
*
*
* string rename_to = 2;
*/
public java.lang.String getRenameTo() {
java.lang.Object ref = renameTo_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
renameTo_ = s;
return s;
}
}
/**
* string rename_to = 2;
*/
public org.nd4j.shade.protobuf.ByteString
getRenameToBytes() {
java.lang.Object ref = renameTo_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
renameTo_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
public static final int DESCRIPTION_FIELD_NUMBER = 3;
private volatile java.lang.Object description_;
/**
*
* Note: this will replace any inherited arg doc. There is no
* current way of modifying arg descriptions (other than replacing
* them entirely) as can be done with op descriptions.
*
*
* string description = 3;
*/
public java.lang.String getDescription() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
description_ = s;
return s;
}
}
/**
* string description = 3;
*/
public org.nd4j.shade.protobuf.ByteString
getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
description_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(org.nd4j.shade.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!getNameBytes().isEmpty()) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (!getRenameToBytes().isEmpty()) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 2, renameTo_);
}
if (!getDescriptionBytes().isEmpty()) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 3, description_);
}
unknownFields.writeTo(output);
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getNameBytes().isEmpty()) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (!getRenameToBytes().isEmpty()) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(2, renameTo_);
}
if (!getDescriptionBytes().isEmpty()) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(3, description_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.tensorflow.framework.ApiDef.Arg)) {
return super.equals(obj);
}
org.tensorflow.framework.ApiDef.Arg other = (org.tensorflow.framework.ApiDef.Arg) obj;
boolean result = true;
result = result && getName()
.equals(other.getName());
result = result && getRenameTo()
.equals(other.getRenameTo());
result = result && getDescription()
.equals(other.getDescription());
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (37 * hash) + RENAME_TO_FIELD_NUMBER;
hash = (53 * hash) + getRenameTo().hashCode();
hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER;
hash = (53 * hash) + getDescription().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.tensorflow.framework.ApiDef.Arg parseFrom(
java.nio.ByteBuffer data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.framework.ApiDef.Arg parseFrom(
java.nio.ByteBuffer data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.framework.ApiDef.Arg parseFrom(
org.nd4j.shade.protobuf.ByteString data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.framework.ApiDef.Arg parseFrom(
org.nd4j.shade.protobuf.ByteString data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.framework.ApiDef.Arg parseFrom(byte[] data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.framework.ApiDef.Arg parseFrom(
byte[] data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.framework.ApiDef.Arg parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.tensorflow.framework.ApiDef.Arg parseFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.framework.ApiDef.Arg parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.tensorflow.framework.ApiDef.Arg parseDelimitedFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.framework.ApiDef.Arg parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.tensorflow.framework.ApiDef.Arg parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.tensorflow.framework.ApiDef.Arg prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code tensorflow.ApiDef.Arg}
*/
public static final class Builder extends
org.nd4j.shade.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:tensorflow.ApiDef.Arg)
org.tensorflow.framework.ApiDef.ArgOrBuilder {
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.tensorflow.framework.ApiDefProtos.internal_static_tensorflow_ApiDef_Arg_descriptor;
}
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.tensorflow.framework.ApiDefProtos.internal_static_tensorflow_ApiDef_Arg_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.tensorflow.framework.ApiDef.Arg.class, org.tensorflow.framework.ApiDef.Arg.Builder.class);
}
// Construct using org.tensorflow.framework.ApiDef.Arg.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.nd4j.shade.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
public Builder clear() {
super.clear();
name_ = "";
renameTo_ = "";
description_ = "";
return this;
}
public org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.tensorflow.framework.ApiDefProtos.internal_static_tensorflow_ApiDef_Arg_descriptor;
}
public org.tensorflow.framework.ApiDef.Arg getDefaultInstanceForType() {
return org.tensorflow.framework.ApiDef.Arg.getDefaultInstance();
}
public org.tensorflow.framework.ApiDef.Arg build() {
org.tensorflow.framework.ApiDef.Arg result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.tensorflow.framework.ApiDef.Arg buildPartial() {
org.tensorflow.framework.ApiDef.Arg result = new org.tensorflow.framework.ApiDef.Arg(this);
result.name_ = name_;
result.renameTo_ = renameTo_;
result.description_ = description_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
org.nd4j.shade.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(org.nd4j.shade.protobuf.Message other) {
if (other instanceof org.tensorflow.framework.ApiDef.Arg) {
return mergeFrom((org.tensorflow.framework.ApiDef.Arg)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.tensorflow.framework.ApiDef.Arg other) {
if (other == org.tensorflow.framework.ApiDef.Arg.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
if (!other.getRenameTo().isEmpty()) {
renameTo_ = other.renameTo_;
onChanged();
}
if (!other.getDescription().isEmpty()) {
description_ = other.description_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.tensorflow.framework.ApiDef.Arg parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.tensorflow.framework.ApiDef.Arg) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object name_ = "";
/**
* string name = 1;
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* string name = 1;
*/
public org.nd4j.shade.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
/**
* string name = 1;
*/
public Builder setName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
* string name = 1;
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
* string name = 1;
*/
public Builder setNameBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
private java.lang.Object renameTo_ = "";
/**
*
* Change the name used to access this arg in the API from what
* is used in the GraphDef. Note that these names in `backticks`
* will also be replaced in the summary & description fields.
*
*
* string rename_to = 2;
*/
public Builder setRenameTo(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
renameTo_ = value;
onChanged();
return this;
}
private java.lang.Object description_ = "";
/**
*
* Note: this will replace any inherited arg doc. There is no
* current way of modifying arg descriptions (other than replacing
* them entirely) as can be done with op descriptions.
*
*
* string description = 3;
*/
public Builder setDescriptionBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
description_ = value;
onChanged();
return this;
}
public final Builder setUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFieldsProto3(unknownFields);
}
public final Builder mergeUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:tensorflow.ApiDef.Arg)
}
// @@protoc_insertion_point(class_scope:tensorflow.ApiDef.Arg)
private static final org.tensorflow.framework.ApiDef.Arg DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.tensorflow.framework.ApiDef.Arg();
}
public static org.tensorflow.framework.ApiDef.Arg getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final org.nd4j.shade.protobuf.Parser<Arg>
PARSER = new org.nd4j.shade.protobuf.AbstractParser<Arg>() {
public Arg parsePartialFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return new Arg(input, extensionRegistry);
}
};
public static org.nd4j.shade.protobuf.Parser<Arg> parser() {
return PARSER;
}
@java.lang.Override
public org.nd4j.shade.protobuf.Parser<Arg> getParserForType() {
return PARSER;
}
public org.tensorflow.framework.ApiDef.Arg getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
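/*
 * Usage sketch (illustrative, not part of the generated file): per the class
 * comment, a per-language ApiDef can replace or modify values inherited from
 * the common ApiDef. For string fields, Builder.mergeFrom(other) copies only
 * the fields that are non-empty in `other` (see mergeFrom above), so an
 * override Arg only needs to set what it changes. Names are made up; the
 * setters are the standard generated ones for string fields.
 *
 *   ApiDef.Arg common = ApiDef.Arg.newBuilder()
 *       .setName("input")
 *       .setDescription("The tensor to process.")
 *       .build();
 *   ApiDef.Arg override = ApiDef.Arg.newBuilder()
 *       .setRenameTo("x") // expose the arg as "x" in this client language
 *       .build();
 *   ApiDef.Arg merged = ApiDef.Arg.newBuilder(common).mergeFrom(override).build();
 *   // merged: name == "input", rename_to == "x", description unchanged
 */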
public interface AttrOrBuilder extends
// @@protoc_insertion_point(interface_extends:tensorflow.ApiDef.Attr)
org.nd4j.shade.protobuf.MessageOrBuilder {
/**
* string name = 1;
*/
java.lang.String getName();
/**
* string name = 1;
*/
org.nd4j.shade.protobuf.ByteString
getNameBytes();
/**
*
* Change the name used to access this attr in the API from what
* is used in the GraphDef. Note that these names in `backticks`
* will also be replaced in the summary & description fields.
*
*
* string rename_to = 2;
*/
java.lang.String getRenameTo();
/**
* string rename_to = 2;
*/
org.nd4j.shade.protobuf.ByteString
getRenameToBytes();
/**
*
* Specify a new default value to use for this attr. This default
* will be used when creating new graphs, as opposed to the
* default in the OpDef, which will be used when interpreting old
* GraphDefs.
*
*
* .tensorflow.AttrValue default_value = 3;
*/
boolean hasDefaultValue();
/**
* .tensorflow.AttrValue default_value = 3;
*/
org.tensorflow.framework.AttrValue getDefaultValue();
/**
* .tensorflow.AttrValue default_value = 3;
*/
org.tensorflow.framework.AttrValueOrBuilder getDefaultValueOrBuilder();
/**
* string description = 4;
*/
java.lang.String getDescription();
/**
* string description = 4;
*/
org.nd4j.shade.protobuf.ByteString
getDescriptionBytes();
}
/**
*
* Description of the graph-construction-time configuration of this
* Op. That is to say, this describes the attr fields that will
* be specified in the NodeDef.
*
*
* Protobuf type {@code tensorflow.ApiDef.Attr}
*/
public static final class Attr extends
org.nd4j.shade.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:tensorflow.ApiDef.Attr)
AttrOrBuilder {
private static final long serialVersionUID = 0L;
// Use Attr.newBuilder() to construct.
private Attr(org.nd4j.shade.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Attr() {
name_ = "";
renameTo_ = "";
description_ = "";
}
@java.lang.Override
public final org.nd4j.shade.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Attr(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.nd4j.shade.protobuf.UnknownFieldSet.Builder unknownFields =
org.nd4j.shade.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownFieldProto3(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
java.lang.String s = input.readStringRequireUtf8();
name_ = s;
break;
}
case 18: {
java.lang.String s = input.readStringRequireUtf8();
renameTo_ = s;
break;
}
case 26: {
org.tensorflow.framework.AttrValue.Builder subBuilder = null;
if (defaultValue_ != null) {
subBuilder = defaultValue_.toBuilder();
}
defaultValue_ = input.readMessage(org.tensorflow.framework.AttrValue.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(defaultValue_);
defaultValue_ = subBuilder.buildPartial();
}
break;
}
case 34: {
java.lang.String s = input.readStringRequireUtf8();
description_ = s;
break;
}
}
}
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.nd4j.shade.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.tensorflow.framework.ApiDefProtos.internal_static_tensorflow_ApiDef_Attr_descriptor;
}
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.tensorflow.framework.ApiDefProtos.internal_static_tensorflow_ApiDef_Attr_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.tensorflow.framework.ApiDef.Attr.class, org.tensorflow.framework.ApiDef.Attr.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
private volatile java.lang.Object name_;
/**
* string name = 1;
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
* string name = 1;
*/
public org.nd4j.shade.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
public static final int RENAME_TO_FIELD_NUMBER = 2;
private volatile java.lang.Object renameTo_;
/**
*
* Change the name used to access this attr in the API from what
* is used in the GraphDef. Note that these names in `backticks`
* will also be replaced in the summary & description fields.
*
*
* string rename_to = 2;
*/
public java.lang.String getRenameTo() {
java.lang.Object ref = renameTo_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
renameTo_ = s;
return s;
}
}
/**
* string rename_to = 2;
*/
public org.nd4j.shade.protobuf.ByteString
getRenameToBytes() {
java.lang.Object ref = renameTo_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
renameTo_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
public static final int DEFAULT_VALUE_FIELD_NUMBER = 3;
private org.tensorflow.framework.AttrValue defaultValue_;
/**
*
* Specify a new default value to use for this attr. This default
* will be used when creating new graphs, as opposed to the
* default in the OpDef, which will be used when interpreting old
* GraphDefs.
*
*
* .tensorflow.AttrValue default_value = 3;
*/
public boolean hasDefaultValue() {
return defaultValue_ != null;
}
/**
* .tensorflow.AttrValue default_value = 3;
*/
public org.tensorflow.framework.AttrValue getDefaultValue() {
return defaultValue_ == null ? org.tensorflow.framework.AttrValue.getDefaultInstance() : defaultValue_;
}
/**
* .tensorflow.AttrValue default_value = 3;
*/
public org.tensorflow.framework.AttrValueOrBuilder getDefaultValueOrBuilder() {
return getDefaultValue();
}
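/*
 * Usage sketch (illustrative, not part of the generated file): default_value
 * supplies a new default used when building new graphs, while the OpDef
 * default still applies when interpreting old GraphDefs (see the field comment
 * above). Attr.Builder.setDefaultValue and AttrValue.Builder.setB are assumed
 * to be the standard generated setters; the attr name and value are made up.
 *
 *   ApiDef.Attr attr = ApiDef.Attr.newBuilder()
 *       .setName("use_fast_path")
 *       .setDefaultValue(org.tensorflow.framework.AttrValue.newBuilder()
 *           .setB(true)
 *           .build())
 *       .build();
 *   // attr.hasDefaultValue() == true; attr.getDefaultValue().getB() == true
 */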
public static final int DESCRIPTION_FIELD_NUMBER = 4;
private volatile java.lang.Object description_;
/**
*
* Note: this will replace any inherited attr doc, there is no current
* way of modifying attr descriptions as can be done with op descriptions.
*
* string description = 4;
*/
public java.lang.String getDescription() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
description_ = s;
return s;
}
}
/**
* string description = 4;
*/
public org.nd4j.shade.protobuf.ByteString
getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
description_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(org.nd4j.shade.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!getNameBytes().isEmpty()) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (!getRenameToBytes().isEmpty()) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 2, renameTo_);
}
if (defaultValue_ != null) {
output.writeMessage(3, getDefaultValue());
}
if (!getDescriptionBytes().isEmpty()) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 4, description_);
}
unknownFields.writeTo(output);
}
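// Usage sketch: besides writeTo(CodedOutputStream) above, generated messages inherit
// the usual protobuf serialization helpers. A hedged example round-tripping an Attr
// through a byte array (the "attr" variable is hypothetical):
//
//   byte[] bytes = attr.toByteArray();
//   org.tensorflow.framework.ApiDef.Attr copy =
//       org.tensorflow.framework.ApiDef.Attr.parseFrom(bytes);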
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getNameBytes().isEmpty()) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (!getRenameToBytes().isEmpty()) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(2, renameTo_);
}
if (defaultValue_ != null) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(3, getDefaultValue());
}
if (!getDescriptionBytes().isEmpty()) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(4, description_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.tensorflow.framework.ApiDef.Attr)) {
return super.equals(obj);
}
org.tensorflow.framework.ApiDef.Attr other = (org.tensorflow.framework.ApiDef.Attr) obj;
boolean result = true;
result = result && getName()
.equals(other.getName());
result = result && getRenameTo()
.equals(other.getRenameTo());
result = result && (hasDefaultValue() == other.hasDefaultValue());
if (hasDefaultValue()) {
result = result && getDefaultValue()
.equals(other.getDefaultValue());
}
result = result && getDescription()
.equals(other.getDescription());
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (37 * hash) + RENAME_TO_FIELD_NUMBER;
hash = (53 * hash) + getRenameTo().hashCode();
if (hasDefaultValue()) {
hash = (37 * hash) + DEFAULT_VALUE_FIELD_NUMBER;
hash = (53 * hash) + getDefaultValue().hashCode();
}
hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER;
hash = (53 * hash) + getDescription().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
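// Usage sketch: equals() and hashCode() compare field values (with the hash memoized),
// so Attr messages can be deduplicated by value. Hedged example; "attr" is hypothetical.
//
//   java.util.Set<org.tensorflow.framework.ApiDef.Attr> seen = new java.util.HashSet<>();
//   boolean firstTime = seen.add(attr);  // false if an equal Attr was already added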
public static org.tensorflow.framework.ApiDef.Attr parseFrom(
java.nio.ByteBuffer data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.framework.ApiDef.Attr parseFrom(
java.nio.ByteBuffer data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.framework.ApiDef.Attr parseFrom(
org.nd4j.shade.protobuf.ByteString data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.framework.ApiDef.Attr parseFrom(
org.nd4j.shade.protobuf.ByteString data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.framework.ApiDef.Attr parseFrom(byte[] data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.framework.ApiDef.Attr parseFrom(
byte[] data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.framework.ApiDef.Attr parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.tensorflow.framework.ApiDef.Attr parseFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.framework.ApiDef.Attr parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.tensorflow.framework.ApiDef.Attr parseDelimitedFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.framework.ApiDef.Attr parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.tensorflow.framework.ApiDef.Attr parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
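// Usage sketch: all of the parseFrom overloads above delegate to the same PARSER. A
// hedged example reading an Attr from an InputStream (the file name is made up):
//
//   try (java.io.InputStream in = new java.io.FileInputStream("attr.bin")) {
//     org.tensorflow.framework.ApiDef.Attr attr =
//         org.tensorflow.framework.ApiDef.Attr.parseFrom(in);
//   }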
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.tensorflow.framework.ApiDef.Attr prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
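// Usage sketch: toBuilder() copies the current field values, so an existing Attr can be
// tweaked without mutating the original. A hedged example, assuming the standard
// generated setRenameTo setter:
//
//   org.tensorflow.framework.ApiDef.Attr renamed =
//       attr.toBuilder().setRenameTo("new_name").build();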
@java.lang.Override
protected Builder newBuilderForType(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
* Description of the graph-construction-time configuration of this
* Op. That is to say, this describes the attr fields that will
* be specified in the NodeDef.
*
*
* Protobuf type {@code tensorflow.ApiDef.Attr}
*/
public static final class Builder extends
org.nd4j.shade.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:tensorflow.ApiDef.Attr)
org.tensorflow.framework.ApiDef.AttrOrBuilder {
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.tensorflow.framework.ApiDefProtos.internal_static_tensorflow_ApiDef_Attr_descriptor;
}
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.tensorflow.framework.ApiDefProtos.internal_static_tensorflow_ApiDef_Attr_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.tensorflow.framework.ApiDef.Attr.class, org.tensorflow.framework.ApiDef.Attr.Builder.class);
}
// Construct using org.tensorflow.framework.ApiDef.Attr.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (org.nd4j.shade.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
public Builder clear() {
super.clear();
name_ = "";
renameTo_ = "";
if (defaultValueBuilder_ == null) {
defaultValue_ = null;
} else {
defaultValue_ = null;
defaultValueBuilder_ = null;
}
description_ = "";
return this;
}
public org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.tensorflow.framework.ApiDefProtos.internal_static_tensorflow_ApiDef_Attr_descriptor;
}
public org.tensorflow.framework.ApiDef.Attr getDefaultInstanceForType() {
return org.tensorflow.framework.ApiDef.Attr.getDefaultInstance();
}
public org.tensorflow.framework.ApiDef.Attr build() {
org.tensorflow.framework.ApiDef.Attr result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
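// Usage sketch: the typical construction path goes through newBuilder() and build(). A
// minimal, hedged example; the field values are made up for illustration.
//
//   org.tensorflow.framework.ApiDef.Attr attr =
//       org.tensorflow.framework.ApiDef.Attr.newBuilder()
//           .setName("T")
//           .setDescription("Element type of the tensor.")
//           .build();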
public org.tensorflow.framework.ApiDef.Attr buildPartial() {
org.tensorflow.framework.ApiDef.Attr result = new org.tensorflow.framework.ApiDef.Attr(this);
result.name_ = name_;
result.renameTo_ = renameTo_;
if (defaultValueBuilder_ == null) {
result.defaultValue_ = defaultValue_;
} else {
result.defaultValue_ = defaultValueBuilder_.build();
}
result.description_ = description_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
org.nd4j.shade.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(org.nd4j.shade.protobuf.Message other) {
if (other instanceof org.tensorflow.framework.ApiDef.Attr) {
return mergeFrom((org.tensorflow.framework.ApiDef.Attr)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.tensorflow.framework.ApiDef.Attr other) {
if (other == org.tensorflow.framework.ApiDef.Attr.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
if (!other.getRenameTo().isEmpty()) {
renameTo_ = other.renameTo_;
onChanged();
}
if (other.hasDefaultValue()) {
mergeDefaultValue(other.getDefaultValue());
}
if (!other.getDescription().isEmpty()) {
description_ = other.description_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
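// Usage sketch: mergeFrom(Attr) copies only the fields that are set on "other", so a
// per-language override can be layered on top of a common definition. Hedged example;
// both variables are hypothetical.
//
//   org.tensorflow.framework.ApiDef.Attr merged =
//       commonAttr.toBuilder().mergeFrom(perLanguageAttr).build();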
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.tensorflow.framework.ApiDef.Attr parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.tensorflow.framework.ApiDef.Attr) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object name_ = "";
/**
* string name = 1;
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* string name = 1;
*/
public org.nd4j.shade.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
/**
* string name = 1;
*/
public Builder setName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
* string name = 1;
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
* string name = 1;
*/
public Builder setNameBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
private java.lang.Object renameTo_ = "";
/**
*
* Specify a new default value to use for this attr. This default
* will be used when creating new graphs, as opposed to the
* default in the OpDef, which will be used when interpreting old
* GraphDefs.
*
*
* .tensorflow.AttrValue default_value = 3;
*/
public Builder setDefaultValue(org.tensorflow.framework.AttrValue value) {
if (defaultValueBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
defaultValue_ = value;
onChanged();
} else {
defaultValueBuilder_.setMessage(value);
}
return this;
}
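// Usage sketch: the default value is a full tensorflow.AttrValue message, built with its
// own generated builder. A hedged example setting an integer default; "builder" is the
// hypothetical Attr.Builder being configured.
//
//   builder.setDefaultValue(
//       org.tensorflow.framework.AttrValue.newBuilder().setI(1).build());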
/**
*
* Name of the op (in the OpDef) to specify the API for.
*
*
* string graph_op_name = 1;
*/
public org.nd4j.shade.protobuf.ByteString
getGraphOpNameBytes() {
java.lang.Object ref = graphOpName_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
graphOpName_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
public static final int VISIBILITY_FIELD_NUMBER = 2;
private int visibility_;
/**
* .tensorflow.ApiDef.Visibility visibility = 2;
*/
public int getVisibilityValue() {
return visibility_;
}
/**
* .tensorflow.ApiDef.Visibility visibility = 2;
*/
public org.tensorflow.framework.ApiDef.Visibility getVisibility() {
org.tensorflow.framework.ApiDef.Visibility result = org.tensorflow.framework.ApiDef.Visibility.valueOf(visibility_);
return result == null ? org.tensorflow.framework.ApiDef.Visibility.UNRECOGNIZED : result;
}
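// Usage sketch: getVisibility() maps the raw enum number onto the Visibility enum and
// returns UNRECOGNIZED for values this generated code does not know about. A hedged
// example of handling that case (the "apiDef" variable is hypothetical):
//
//   org.tensorflow.framework.ApiDef.Visibility vis = apiDef.getVisibility();
//   if (vis == org.tensorflow.framework.ApiDef.Visibility.UNRECOGNIZED) {
//     // Fall back to the raw wire value, e.g. for forward compatibility.
//     int raw = apiDef.getVisibilityValue();
//   }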
public static final int ENDPOINT_FIELD_NUMBER = 3;
private java.util.List<org.tensorflow.framework.ApiDef.Endpoint> endpoint_;
/**
* repeated .tensorflow.ApiDef.Endpoint endpoint = 3;
*/
public java.util.List<org.tensorflow.framework.ApiDef.Endpoint> getEndpointList() {
return endpoint_;
}
/**
* repeated .tensorflow.ApiDef.Endpoint endpoint = 3;
*/
public java.util.List<? extends org.tensorflow.framework.ApiDef.EndpointOrBuilder>
getEndpointOrBuilderList() {
return endpoint_;
}
/**
* repeated .tensorflow.ApiDef.Endpoint endpoint = 3;
*/
public int getEndpointCount() {
return endpoint_.size();
}
/**
* repeated .tensorflow.ApiDef.Endpoint endpoint = 3;
*/
public org.tensorflow.framework.ApiDef.Endpoint getEndpoint(int index) {
return endpoint_.get(index);
}
/**
* repeated .tensorflow.ApiDef.Endpoint endpoint = 3;
*/
public org.tensorflow.framework.ApiDef.EndpointOrBuilder getEndpointOrBuilder(
int index) {
return endpoint_.get(index);
}
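// Usage sketch: endpoint is a repeated message field, so the generated list accessor can
// be iterated directly. Hedged example; the "apiDef" variable is hypothetical.
//
//   for (org.tensorflow.framework.ApiDef.Endpoint ep : apiDef.getEndpointList()) {
//     // Each endpoint names one exposed entry point for the op in a client language.
//   }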
public static final int IN_ARG_FIELD_NUMBER = 4;
private java.util.List<org.tensorflow.framework.ApiDef.Arg> inArg_;
/**
* repeated .tensorflow.ApiDef.Arg in_arg = 4;
*/
public java.util.List<org.tensorflow.framework.ApiDef.Arg> getInArgList() {
return inArg_;
}
/**
* repeated .tensorflow.ApiDef.Arg in_arg = 4;
*/
public java.util.List<? extends org.tensorflow.framework.ApiDef.ArgOrBuilder>
getInArgOrBuilderList() {
return inArg_;
}
/**
* repeated .tensorflow.ApiDef.Arg in_arg = 4;
*/
public int getInArgCount() {
return inArg_.size();
}
/**
* repeated .tensorflow.ApiDef.Arg in_arg = 4;
*/
public org.tensorflow.framework.ApiDef.Arg getInArg(int index) {
return inArg_.get(index);
}
/**
* repeated .tensorflow.ApiDef.Arg in_arg = 4;
*/
public org.tensorflow.framework.ApiDef.ArgOrBuilder getInArgOrBuilder(
int index) {
return inArg_.get(index);
}
public static final int OUT_ARG_FIELD_NUMBER = 5;
private java.util.List<org.tensorflow.framework.ApiDef.Arg> outArg_;
/**
* repeated .tensorflow.ApiDef.Arg out_arg = 5;
*/
public java.util.List<org.tensorflow.framework.ApiDef.Arg> getOutArgList() {
return outArg_;
}
/**
* repeated .tensorflow.ApiDef.Arg out_arg = 5;
*/
public java.util.List<? extends org.tensorflow.framework.ApiDef.ArgOrBuilder>
getOutArgOrBuilderList() {
return outArg_;
}
/**
* repeated .tensorflow.ApiDef.Arg out_arg = 5;
*/
public int getOutArgCount() {
return outArg_.size();
}
/**
* repeated .tensorflow.ApiDef.Arg out_arg = 5;
*/
public org.tensorflow.framework.ApiDef.Arg getOutArg(int index) {
return outArg_.get(index);
}
/**
* repeated .tensorflow.ApiDef.Arg out_arg = 5;
*/
public org.tensorflow.framework.ApiDef.ArgOrBuilder getOutArgOrBuilder(
int index) {
return outArg_.get(index);
}
public static final int ARG_ORDER_FIELD_NUMBER = 11;
private org.nd4j.shade.protobuf.LazyStringList argOrder_;
/**
*
* List of original in_arg names to specify new argument order.
* Length of arg_order should be either empty to keep current order
* or match size of in_arg.
*
*
* repeated string arg_order = 11;
*/
public int getArgOrderCount() {
return argOrder_.size();
}
/**
*
* List of original in_arg names to specify new argument order.
* Length of arg_order should be either empty to keep current order
* or match size of in_arg.
*
*
* repeated string arg_order = 11;
*/
public org.nd4j.shade.protobuf.ByteString
getArgOrderBytes(int index) {
return argOrder_.getByteString(index);
}
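// Usage sketch: per the field comment, arg_order must be either empty or exactly as long
// as in_arg. A hedged validation example; the "apiDef" variable is hypothetical.
//
//   int n = apiDef.getArgOrderCount();
//   if (n != 0 && n != apiDef.getInArgCount()) {
//     throw new IllegalArgumentException("arg_order must be empty or match in_arg size");
//   }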
public static final int ATTR_FIELD_NUMBER = 6;
private java.util.List<org.tensorflow.framework.ApiDef.Attr> attr_;
/**
* repeated .tensorflow.ApiDef.Attr attr = 6;
*/
public java.util.List<org.tensorflow.framework.ApiDef.Attr> getAttrList() {
return attr_;
}
/**
* repeated .tensorflow.ApiDef.Attr attr = 6;
*/
public java.util.List<? extends org.tensorflow.framework.ApiDef.AttrOrBuilder>
getAttrOrBuilderList() {
return attr_;
}
/**
* repeated .tensorflow.ApiDef.Attr attr = 6;
*/
public int getAttrCount() {
return attr_.size();
}
/**
* repeated .tensorflow.ApiDef.Attr attr = 6;
*/
public org.tensorflow.framework.ApiDef.Attr getAttr(int index) {
return attr_.get(index);
}
/**
* repeated .tensorflow.ApiDef.Attr attr = 6;
*/
public org.tensorflow.framework.ApiDef.AttrOrBuilder getAttrOrBuilder(
int index) {
return attr_.get(index);
}
public static final int SUMMARY_FIELD_NUMBER = 7;
private volatile java.lang.Object summary_;
/**
*
* One-line human-readable description of what the Op does.
*
* Modify an existing/inherited description by adding text to the beginning
* or end.
*
*
* string description_prefix = 9;
*/
public org.nd4j.shade.protobuf.ByteString
getDescriptionPrefixBytes() {
java.lang.Object ref = descriptionPrefix_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
descriptionPrefix_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
public static final int DESCRIPTION_SUFFIX_FIELD_NUMBER = 10;
private volatile java.lang.Object descriptionSuffix_;
/**
* string description_suffix = 10;
*/
public java.lang.String getDescriptionSuffix() {
java.lang.Object ref = descriptionSuffix_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
descriptionSuffix_ = s;
return s;
}
}
/**
* string description_suffix = 10;
*/
public org.nd4j.shade.protobuf.ByteString
getDescriptionSuffixBytes() {
java.lang.Object ref = descriptionSuffix_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
descriptionSuffix_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(org.nd4j.shade.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!getGraphOpNameBytes().isEmpty()) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 1, graphOpName_);
}
if (visibility_ != org.tensorflow.framework.ApiDef.Visibility.DEFAULT_VISIBILITY.getNumber()) {
output.writeEnum(2, visibility_);
}
for (int i = 0; i < endpoint_.size(); i++) {
output.writeMessage(3, endpoint_.get(i));
}
for (int i = 0; i < inArg_.size(); i++) {
output.writeMessage(4, inArg_.get(i));
}
for (int i = 0; i < outArg_.size(); i++) {
output.writeMessage(5, outArg_.get(i));
}
for (int i = 0; i < attr_.size(); i++) {
output.writeMessage(6, attr_.get(i));
}
if (!getSummaryBytes().isEmpty()) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 7, summary_);
}
if (!getDescriptionBytes().isEmpty()) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 8, description_);
}
if (!getDescriptionPrefixBytes().isEmpty()) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 9, descriptionPrefix_);
}
if (!getDescriptionSuffixBytes().isEmpty()) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 10, descriptionSuffix_);
}
for (int i = 0; i < argOrder_.size(); i++) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 11, argOrder_.getRaw(i));
}
unknownFields.writeTo(output);
}
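// Usage sketch: several ApiDef messages can be written back-to-back with the inherited
// length-delimited helper and read again with parseDelimitedFrom below. Hedged example;
// the stream variables are hypothetical.
//
//   apiDef.writeDelimitedTo(out);
//   org.tensorflow.framework.ApiDef next =
//       org.tensorflow.framework.ApiDef.parseDelimitedFrom(in);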
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getGraphOpNameBytes().isEmpty()) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(1, graphOpName_);
}
if (visibility_ != org.tensorflow.framework.ApiDef.Visibility.DEFAULT_VISIBILITY.getNumber()) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeEnumSize(2, visibility_);
}
for (int i = 0; i < endpoint_.size(); i++) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(3, endpoint_.get(i));
}
for (int i = 0; i < inArg_.size(); i++) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(4, inArg_.get(i));
}
for (int i = 0; i < outArg_.size(); i++) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(5, outArg_.get(i));
}
for (int i = 0; i < attr_.size(); i++) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(6, attr_.get(i));
}
if (!getSummaryBytes().isEmpty()) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(7, summary_);
}
if (!getDescriptionBytes().isEmpty()) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(8, description_);
}
if (!getDescriptionPrefixBytes().isEmpty()) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(9, descriptionPrefix_);
}
if (!getDescriptionSuffixBytes().isEmpty()) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(10, descriptionSuffix_);
}
{
int dataSize = 0;
for (int i = 0; i < argOrder_.size(); i++) {
dataSize += computeStringSizeNoTag(argOrder_.getRaw(i));
}
size += dataSize;
size += 1 * getArgOrderList().size();
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.tensorflow.framework.ApiDef)) {
return super.equals(obj);
}
org.tensorflow.framework.ApiDef other = (org.tensorflow.framework.ApiDef) obj;
boolean result = true;
result = result && getGraphOpName()
.equals(other.getGraphOpName());
result = result && visibility_ == other.visibility_;
result = result && getEndpointList()
.equals(other.getEndpointList());
result = result && getInArgList()
.equals(other.getInArgList());
result = result && getOutArgList()
.equals(other.getOutArgList());
result = result && getArgOrderList()
.equals(other.getArgOrderList());
result = result && getAttrList()
.equals(other.getAttrList());
result = result && getSummary()
.equals(other.getSummary());
result = result && getDescription()
.equals(other.getDescription());
result = result && getDescriptionPrefix()
.equals(other.getDescriptionPrefix());
result = result && getDescriptionSuffix()
.equals(other.getDescriptionSuffix());
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + GRAPH_OP_NAME_FIELD_NUMBER;
hash = (53 * hash) + getGraphOpName().hashCode();
hash = (37 * hash) + VISIBILITY_FIELD_NUMBER;
hash = (53 * hash) + visibility_;
if (getEndpointCount() > 0) {
hash = (37 * hash) + ENDPOINT_FIELD_NUMBER;
hash = (53 * hash) + getEndpointList().hashCode();
}
if (getInArgCount() > 0) {
hash = (37 * hash) + IN_ARG_FIELD_NUMBER;
hash = (53 * hash) + getInArgList().hashCode();
}
if (getOutArgCount() > 0) {
hash = (37 * hash) + OUT_ARG_FIELD_NUMBER;
hash = (53 * hash) + getOutArgList().hashCode();
}
if (getArgOrderCount() > 0) {
hash = (37 * hash) + ARG_ORDER_FIELD_NUMBER;
hash = (53 * hash) + getArgOrderList().hashCode();
}
if (getAttrCount() > 0) {
hash = (37 * hash) + ATTR_FIELD_NUMBER;
hash = (53 * hash) + getAttrList().hashCode();
}
hash = (37 * hash) + SUMMARY_FIELD_NUMBER;
hash = (53 * hash) + getSummary().hashCode();
hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER;
hash = (53 * hash) + getDescription().hashCode();
hash = (37 * hash) + DESCRIPTION_PREFIX_FIELD_NUMBER;
hash = (53 * hash) + getDescriptionPrefix().hashCode();
hash = (37 * hash) + DESCRIPTION_SUFFIX_FIELD_NUMBER;
hash = (53 * hash) + getDescriptionSuffix().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.tensorflow.framework.ApiDef parseFrom(
java.nio.ByteBuffer data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.framework.ApiDef parseFrom(
java.nio.ByteBuffer data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.framework.ApiDef parseFrom(
org.nd4j.shade.protobuf.ByteString data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.framework.ApiDef parseFrom(
org.nd4j.shade.protobuf.ByteString data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.framework.ApiDef parseFrom(byte[] data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.framework.ApiDef parseFrom(
byte[] data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.framework.ApiDef parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.tensorflow.framework.ApiDef parseFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.framework.ApiDef parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.tensorflow.framework.ApiDef parseDelimitedFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.framework.ApiDef parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.tensorflow.framework.ApiDef parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
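// Usage sketch: the parseFrom overloads above handle binary wire-format data. Text-format
// input (such as a single ApiDef stanza) would instead be merged via the shaded TextFormat
// utility; a hedged sketch, with the text contents and that class's availability assumed.
//
//   org.tensorflow.framework.ApiDef.Builder b = org.tensorflow.framework.ApiDef.newBuilder();
//   org.nd4j.shade.protobuf.TextFormat.merge(new java.io.StringReader(text), b);
//   org.tensorflow.framework.ApiDef apiDef = b.build();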
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.tensorflow.framework.ApiDef prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
* List of original in_arg names to specify new argument order.
* Length of arg_order should be either empty to keep current order
* or match size of in_arg.
*
*
* repeated string arg_order = 11;
*/
public int getArgOrderCount() {
return argOrder_.size();
}
/**
*
* List of original in_arg names to specify new argument order.
* Length of arg_order should be either empty to keep current order
* or match size of in_arg.
*
*
* repeated string arg_order = 11;
*/
public Builder setArgOrder(
int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureArgOrderIsMutable();
argOrder_.set(index, value);
onChanged();
return this;
}
/**
*
* List of original in_arg names to specify new argument order.
* Length of arg_order should be either empty to keep current order
* or match size of in_arg.
*