/**
* Describes the type of one or more tensors that are accepted/produced
* by this input/output arg. The only legal combinations are:
* * For a single tensor: either the "type" field is set or the
* "type_attr" field is set to the name of an attr with type "type".
* * For a sequence of tensors with the same type: the "number_attr"
* field will be set to the name of an attr with type "int", and
* either the "type" or "type_attr" field will be set as for
* single tensors.
* * For a sequence of tensors, the "type_list_attr" field will be set
* to the name of an attr with type "list(type)".
*
* .tensorflow.DataType type = 3;
*
* Returns the raw numeric wire value of the "type" enum field;
* use getType() for the decoded DataType constant.
*/
int getTypeValue();
/**
 * Returns the UTF-8 bytes of the "description" field.
 * The field is stored as either a String or a ByteString; on first
 * access the String form is replaced in place by its ByteString
 * encoding so later calls return the cached value.
 *
 * string description = 2;
 */
public org.nd4j.shade.protobuf.ByteString
getDescriptionBytes() {
  java.lang.Object ref = description_;
  if (!(ref instanceof java.lang.String)) {
    // Already cached as a ByteString.
    return (org.nd4j.shade.protobuf.ByteString) ref;
  }
  org.nd4j.shade.protobuf.ByteString encoded =
      org.nd4j.shade.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  description_ = encoded;
  return encoded;
}
public static final int TYPE_FIELD_NUMBER = 3;
// Raw wire value of the "type" enum field (a tensorflow.DataType number).
private int type_;
/**
 * Describes the type of one or more tensors that are accepted/produced
 * by this input/output arg. See the message definition for the legal
 * combinations of type / type_attr / number_attr / type_list_attr.
 *
 * .tensorflow.DataType type = 3;
 *
 * Returns the raw numeric wire value; use getType() for the decoded enum.
 */
public int getTypeValue() {
return type_;
}
/**
 * Describes the type of one or more tensors that are accepted/produced
 * by this input/output arg. See the message definition for the legal
 * combinations of type / type_attr / number_attr / type_list_attr.
 *
 * .tensorflow.DataType type = 3;
 *
 * Returns the decoded DataType, or UNRECOGNIZED when the stored
 * numeric value does not correspond to any known enum constant.
 */
public org.tensorflow.framework.DataType getType() {
  // forNumber(int) is the non-deprecated replacement for valueOf(int)
  // on protobuf-generated enums; its semantics are identical (null for
  // unknown values), so the @SuppressWarnings is no longer needed.
  org.tensorflow.framework.DataType result =
      org.tensorflow.framework.DataType.forNumber(type_);
  return result == null ? org.tensorflow.framework.DataType.UNRECOGNIZED : result;
}
public static final int TYPE_ATTR_FIELD_NUMBER = 4;
// Lazily-cached String/ByteString holder for the "type_attr" field.
private volatile java.lang.Object typeAttr_;
/**
 * If specified, attr must have type "list(type)", and none of
 * type, type_attr, and number_attr may be specified.
 *
 * string type_list_attr = 6;
 *
 * Returns the UTF-8 bytes of "type_list_attr", caching the ByteString
 * form in place of the String on first access.
 */
public org.nd4j.shade.protobuf.ByteString
getTypeListAttrBytes() {
java.lang.Object ref = typeListAttr_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
typeListAttr_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
public static final int IS_REF_FIELD_NUMBER = 16;
private boolean isRef_;
/**
 * Sets the "description" field from pre-encoded UTF-8 bytes.
 *
 * string description = 2;
 *
 * Throws NullPointerException if value is null; the bytes are
 * validated as well-formed UTF-8 before being stored.
 */
public Builder setDescriptionBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
description_ = value;
onChanged();
return this;
}
// Builder-side raw wire value of the "type" enum field.
private int type_ = 0;
/**
 * Describes the type of one or more tensors that are accepted/produced
 * by this input/output arg. See the message definition for the legal
 * combinations of type / type_attr / number_attr / type_list_attr.
 *
 * .tensorflow.DataType type = 3;
 *
 * Returns the raw numeric wire value; use getType() for the decoded enum.
 */
public int getTypeValue() {
return type_;
}
/**
 * Describes the type of one or more tensors that are accepted/produced
 * by this input/output arg. The only legal combinations are:
 * * For a single tensor: either the "type" field is set or the
 *   "type_attr" field is set to the name of an attr with type "type".
 * * For a sequence of tensors with the same type: the "number_attr"
 *   field will be set to the name of an attr with type "int", and
 *   either the "type" or "type_attr" field will be set as for
 *   single tensors.
 * * For a sequence of tensors, the "type_list_attr" field will be set
 *   to the name of an attr with type "list(type)".
 *
 * .tensorflow.DataType type = 3;
 *
 * Returns the decoded DataType, or UNRECOGNIZED when the stored
 * numeric value does not correspond to any known enum constant.
 */
public org.tensorflow.framework.DataType getType() {
  // forNumber(int) is the non-deprecated replacement for valueOf(int)
  // on protobuf-generated enums; its semantics are identical.
  org.tensorflow.framework.DataType result =
      org.tensorflow.framework.DataType.forNumber(type_);
  return result == null ? org.tensorflow.framework.DataType.UNRECOGNIZED : result;
}
/**
 * Sets the "type" field from a DataType enum constant.
 *
 * .tensorflow.DataType type = 3;
 *
 * Throws NullPointerException if value is null. Stores the enum's
 * numeric wire value, so UNRECOGNIZED must not be passed here
 * (getNumber() throws for it).
 */
public Builder setType(org.tensorflow.framework.DataType value) {
if (value == null) {
throw new NullPointerException();
}
type_ = value.getNumber();
onChanged();
return this;
}
/**
 * For inputs: if true, the inputs are required to be refs.
 * By default, inputs can be either refs or non-refs.
 * For outputs: if true, outputs are refs, otherwise they are not.
 *
 * bool is_ref = 16;
 *
 * Resets "is_ref" to its default (false).
 */
public Builder clearIsRef() {
isRef_ = false;
onChanged();
return this;
}
// Standard generated delegation: unknown-field handling is implemented
// entirely by the GeneratedMessageV3.Builder superclass.
@java.lang.Override
public final Builder setUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:tensorflow.OpDef.ArgDef)
}
// @@protoc_insertion_point(class_scope:tensorflow.OpDef.ArgDef)
// Singleton default instance for ArgDef; all unset message fields
// resolve to this shared, immutable value.
private static final org.tensorflow.framework.OpDef.ArgDef DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.tensorflow.framework.OpDef.ArgDef();
}
public static org.tensorflow.framework.OpDef.ArgDef getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Shared parser for ArgDef messages. The type parameters below were
// present in the original generated code; the raw types in this copy
// were an artifact of the <ArgDef> markers being stripped during
// extraction, and raw Parser/AbstractParser would otherwise produce
// unchecked warnings and an Object-typed parse result.
private static final org.nd4j.shade.protobuf.Parser<ArgDef>
    PARSER = new org.nd4j.shade.protobuf.AbstractParser<ArgDef>() {
  @java.lang.Override
  public ArgDef parsePartialFrom(
      org.nd4j.shade.protobuf.CodedInputStream input,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    return new ArgDef(input, extensionRegistry);
  }
};

/** Returns the shared parser for ArgDef. */
public static org.nd4j.shade.protobuf.Parser<ArgDef> parser() {
  return PARSER;
}

@java.lang.Override
public org.nd4j.shade.protobuf.Parser<ArgDef> getParserForType() {
  return PARSER;
}

@java.lang.Override
public org.tensorflow.framework.OpDef.ArgDef getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
public interface AttrDefOrBuilder extends
// @@protoc_insertion_point(interface_extends:tensorflow.OpDef.AttrDef)
org.nd4j.shade.protobuf.MessageOrBuilder {
/**
 * Description of the graph-construction-time configuration of this
 * Op.  That is to say, this describes the attr fields that will
 * be specified in the NodeDef.
 *
 * Protobuf type {@code tensorflow.OpDef.AttrDef}
 */
public static final class AttrDef extends
org.nd4j.shade.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:tensorflow.OpDef.AttrDef)
AttrDefOrBuilder {
private static final long serialVersionUID = 0L;
// Use AttrDef.newBuilder() to construct.
private AttrDef(org.nd4j.shade.protobuf.GeneratedMessageV3.Builder builder) {
super(builder);
}
// Default constructor: string fields start as empty strings, never null.
private AttrDef() {
name_ = "";
type_ = "";
description_ = "";
}
// Reflection hook used by the protobuf runtime to create instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new AttrDef();
}
@java.lang.Override
public final org.nd4j.shade.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor. Reads tags until end-of-message;
// each case value is (field_number << 3) | wire_type, so e.g. 10 is
// field 1 with wire type 2 (length-delimited).
private AttrDef(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
org.nd4j.shade.protobuf.UnknownFieldSet.Builder unknownFields =
org.nd4j.shade.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 marks end of input.
done = true;
break;
case 10: {
// string name = 1
java.lang.String s = input.readStringRequireUtf8();
name_ = s;
break;
}
case 18: {
// string type = 2
java.lang.String s = input.readStringRequireUtf8();
type_ = s;
break;
}
case 26: {
// .tensorflow.AttrValue default_value = 3
// Repeated occurrences merge into the existing message, per
// protobuf semantics for non-repeated message fields.
org.tensorflow.framework.AttrValue.Builder subBuilder = null;
if (defaultValue_ != null) {
subBuilder = defaultValue_.toBuilder();
}
defaultValue_ = input.readMessage(org.tensorflow.framework.AttrValue.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(defaultValue_);
defaultValue_ = subBuilder.buildPartial();
}
break;
}
case 34: {
// string description = 4
java.lang.String s = input.readStringRequireUtf8();
description_ = s;
break;
}
case 40: {
// bool has_minimum = 5
hasMinimum_ = input.readBool();
break;
}
case 48: {
// int64 minimum = 6
minimum_ = input.readInt64();
break;
}
case 58: {
// .tensorflow.AttrValue allowed_values = 7 (merge semantics as above)
org.tensorflow.framework.AttrValue.Builder subBuilder = null;
if (allowedValues_ != null) {
subBuilder = allowedValues_.toBuilder();
}
allowedValues_ = input.readMessage(org.tensorflow.framework.AttrValue.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(allowedValues_);
allowedValues_ = subBuilder.buildPartial();
}
break;
}
default: {
// Unknown fields are preserved rather than dropped.
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.nd4j.shade.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor accessors wired to the static tables in OpDefProtos.
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpDef_AttrDef_descriptor;
}
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpDef_AttrDef_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.tensorflow.framework.OpDef.AttrDef.class, org.tensorflow.framework.OpDef.AttrDef.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
// Lazily-cached String/ByteString holder for the "name" field.
private volatile java.lang.Object name_;
/**
 * A descriptive name for the argument. May be used, e.g. by the
 * Python client, as a keyword argument name, and so should match
 * the regexp "[a-z][a-z0-9_]+".
 *
 * string name = 1;
 *
 * Returns the UTF-8 bytes of "name", caching the ByteString form in
 * place of the String on first access.
 */
public org.nd4j.shade.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
public static final int TYPE_FIELD_NUMBER = 2;
// Lazily-cached String/ByteString holder for the "type" field.
private volatile java.lang.Object type_;
/**
 * Returns the UTF-8 bytes of the "description" field. The field is
 * stored as either a String or a ByteString; the first call replaces
 * the String form in place with its ByteString encoding.
 *
 * string description = 4;
 */
public org.nd4j.shade.protobuf.ByteString
getDescriptionBytes() {
  java.lang.Object ref = description_;
  if (!(ref instanceof java.lang.String)) {
    // Already cached as a ByteString.
    return (org.nd4j.shade.protobuf.ByteString) ref;
  }
  org.nd4j.shade.protobuf.ByteString encoded =
      org.nd4j.shade.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  description_ = encoded;
  return encoded;
}
public static final int HAS_MINIMUM_FIELD_NUMBER = 5;
private boolean hasMinimum_;
/**
 * For type == "int", this is a minimum value. For "list(___)"
 * types, this is the minimum length.
 *
 * bool has_minimum = 5;
 */
public boolean getHasMinimum() {
return hasMinimum_;
}
public static final int MINIMUM_FIELD_NUMBER = 6;
private long minimum_;
/**
 * int64 minimum = 6;
 *
 * Only meaningful when has_minimum is true (see getHasMinimum).
 */
public long getMinimum() {
return minimum_;
}
public static final int ALLOWED_VALUES_FIELD_NUMBER = 7;
// The set of allowed values; null when unset (getter substitutes the
// AttrValue default instance in generated accessors).
private org.tensorflow.framework.AttrValue allowedValues_;
/**
 * A descriptive name for the argument. May be used, e.g. by the
 * Python client, as a keyword argument name, and so should match
 * the regexp "[a-z][a-z0-9_]+".
 *
 * string name = 1;
 *
 * Sets the "name" field. Throws NullPointerException if value is null.
 */
public Builder setName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
 * A descriptive name for the argument. May be used, e.g. by the
 * Python client, as a keyword argument name, and so should match
 * the regexp "[a-z][a-z0-9_]+".
 *
 * string name = 1;
 *
 * Resets "name" to its default (the empty string).
 */
public Builder clearName() {
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
 * A descriptive name for the argument. May be used, e.g. by the
 * Python client, as a keyword argument name, and so should match
 * the regexp "[a-z][a-z0-9_]+".
 *
 * string name = 1;
 *
 * Sets "name" from pre-encoded bytes; the bytes are validated as
 * well-formed UTF-8 before being stored.
 */
public Builder setNameBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
// Builder-side holder for the "type" string field.
private java.lang.Object type_ = "";
/**
 * The set of allowed values. Has type that is the "list" version
 * of the "type" field above (uses the "list" field of AttrValue).
 * If type == "type" or "list(type)" above, then the "type" field
 * of "allowed_values.list" has the set of allowed DataTypes.
 * If type == "string" or "list(string)", then the "s" field of
 * "allowed_values.list" has the set of allowed strings.
 *
 * .tensorflow.AttrValue allowed_values = 7;
 *
 * Sets "allowed_values". While no nested builder exists the value is
 * stored directly; once getAllowedValuesFieldBuilder() has been
 * called, the nested builder owns the field instead.
 */
public Builder setAllowedValues(org.tensorflow.framework.AttrValue value) {
if (allowedValuesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
allowedValues_ = value;
onChanged();
} else {
allowedValuesBuilder_.setMessage(value);
}
return this;
}
/**
 * The set of allowed values. Has type that is the "list" version
 * of the "type" field above (uses the "list" field of AttrValue).
 * If type == "type" or "list(type)" above, then the "type" field
 * of "allowed_values.list" has the set of allowed DataTypes.
 * If type == "string" or "list(string)", then the "s" field of
 * "allowed_values.list" has the set of allowed strings.
 *
 * .tensorflow.AttrValue allowed_values = 7;
 *
 * Lazily creates the nested single-field builder for "allowed_values".
 * After creation, allowedValues_ is nulled out: the builder becomes
 * the single source of truth for the field.
 */
private org.nd4j.shade.protobuf.SingleFieldBuilderV3<
org.tensorflow.framework.AttrValue, org.tensorflow.framework.AttrValue.Builder, org.tensorflow.framework.AttrValueOrBuilder>
getAllowedValuesFieldBuilder() {
if (allowedValuesBuilder_ == null) {
allowedValuesBuilder_ = new org.nd4j.shade.protobuf.SingleFieldBuilderV3<
org.tensorflow.framework.AttrValue, org.tensorflow.framework.AttrValue.Builder, org.tensorflow.framework.AttrValueOrBuilder>(
getAllowedValues(),
getParentForChildren(),
isClean());
allowedValues_ = null;
}
return allowedValuesBuilder_;
}
// Standard generated delegation: unknown-field handling is implemented
// entirely by the GeneratedMessageV3.Builder superclass.
@java.lang.Override
public final Builder setUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:tensorflow.OpDef.AttrDef)
}
// @@protoc_insertion_point(class_scope:tensorflow.OpDef.AttrDef)
// Singleton default instance for AttrDef; all unset message fields
// resolve to this shared, immutable value.
private static final org.tensorflow.framework.OpDef.AttrDef DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.tensorflow.framework.OpDef.AttrDef();
}
public static org.tensorflow.framework.OpDef.AttrDef getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Shared parser for AttrDef messages. The type parameters below were
// present in the original generated code; the raw types in this copy
// were an artifact of the <AttrDef> markers being stripped during
// extraction, and raw Parser/AbstractParser would otherwise produce
// unchecked warnings and an Object-typed parse result.
private static final org.nd4j.shade.protobuf.Parser<AttrDef>
    PARSER = new org.nd4j.shade.protobuf.AbstractParser<AttrDef>() {
  @java.lang.Override
  public AttrDef parsePartialFrom(
      org.nd4j.shade.protobuf.CodedInputStream input,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    return new AttrDef(input, extensionRegistry);
  }
};

/** Returns the shared parser for AttrDef. */
public static org.nd4j.shade.protobuf.Parser<AttrDef> parser() {
  return PARSER;
}

@java.lang.Override
public org.nd4j.shade.protobuf.Parser<AttrDef> getParserForType() {
  return PARSER;
}
}
public static final int NAME_FIELD_NUMBER = 1;
// Lazily-cached String/ByteString holder for the Op "name" field.
private volatile java.lang.Object name_;
/**
 * Additional, longer human-readable description of what the Op does.
 *
 * string description = 6;
 *
 * Returns the UTF-8 bytes of "description", caching the ByteString
 * form in place of the String on first access.
 */
public org.nd4j.shade.protobuf.ByteString
getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
description_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
public static final int IS_COMMUTATIVE_FIELD_NUMBER = 18;
private boolean isCommutative_;
/**
 * True if the operation is commutative ("op(a,b) == op(b,a)" for all inputs)
 *
 * bool is_commutative = 18;
 */
public boolean getIsCommutative() {
return isCommutative_;
}
public static final int IS_AGGREGATE_FIELD_NUMBER = 16;
private boolean isAggregate_;
/**
 * If is_aggregate is true, then this operation accepts N >= 2
 * inputs and produces 1 output all of the same type. Should be
 * associative and commutative, and produce output with the same
 * shape as the input. The optimizer may replace an aggregate op
 * taking input from multiple devices with a tree of aggregate ops
 * that aggregate locally within each device (and possibly within
 * groups of nearby devices) before communicating.
 * TODO(josh11b): Implement that optimization.
 *
 * bool is_aggregate = 16;
 */
public boolean getIsAggregate() {
return isAggregate_;
}
public static final int IS_STATEFUL_FIELD_NUMBER = 17;
private boolean isStateful_;
/**
 * Ops are marked as stateful if their behavior depends on some state beyond
 * their input tensors (e.g. variable reading op) or if they have
 * a side-effect (e.g. printing or asserting ops). Equivalently, stateless ops
 * must always produce the same output for the same input and have
 * no side-effects.
 * By default Ops may be moved between devices. Stateful ops should
 * either not be moved, or should only be moved if that state can also
 * be moved (e.g. via some sort of save / restore).
 * Stateful ops are guaranteed to never be optimized away by Common
 * Subexpression Elimination (CSE).
 *
 * bool is_stateful = 17;
 */
public boolean getIsStateful() {
return isStateful_;
}
public static final int ALLOWS_UNINITIALIZED_INPUT_FIELD_NUMBER = 19;
private boolean allowsUninitializedInput_;
/**
 * By default, all inputs to an Op must be initialized Tensors. Ops
 * that may initialize tensors for the first time should set this
 * field to true, to allow the Op to take an uninitialized Tensor as
 * input.
 *
 * bool allows_uninitialized_input = 19;
 */
public boolean getAllowsUninitializedInput() {
return allowsUninitializedInput_;
}
// Memoized initialization state: -1 = unknown, 0 = not initialized,
// 1 = initialized. Proto3 messages have no required fields, so this
// always resolves to true on first evaluation.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes this OpDef in field-number order, skipping fields at
// their proto3 default values (empty string, false, null message).
@java.lang.Override
public void writeTo(org.nd4j.shade.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!getNameBytes().isEmpty()) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
for (int i = 0; i < inputArg_.size(); i++) {
output.writeMessage(2, inputArg_.get(i));
}
for (int i = 0; i < outputArg_.size(); i++) {
output.writeMessage(3, outputArg_.get(i));
}
for (int i = 0; i < attr_.size(); i++) {
output.writeMessage(4, attr_.get(i));
}
if (!getSummaryBytes().isEmpty()) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 5, summary_);
}
if (!getDescriptionBytes().isEmpty()) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 6, description_);
}
if (deprecation_ != null) {
output.writeMessage(8, getDeprecation());
}
if (isAggregate_ != false) {
output.writeBool(16, isAggregate_);
}
if (isStateful_ != false) {
output.writeBool(17, isStateful_);
}
if (isCommutative_ != false) {
output.writeBool(18, isCommutative_);
}
if (allowsUninitializedInput_ != false) {
output.writeBool(19, allowsUninitializedInput_);
}
// Preserve any fields that were unknown at parse time.
unknownFields.writeTo(output);
}
// Computes (and memoizes) the serialized byte size, mirroring the
// field-skipping logic of writeTo exactly.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getNameBytes().isEmpty()) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
for (int i = 0; i < inputArg_.size(); i++) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(2, inputArg_.get(i));
}
for (int i = 0; i < outputArg_.size(); i++) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(3, outputArg_.get(i));
}
for (int i = 0; i < attr_.size(); i++) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(4, attr_.get(i));
}
if (!getSummaryBytes().isEmpty()) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(5, summary_);
}
if (!getDescriptionBytes().isEmpty()) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(6, description_);
}
if (deprecation_ != null) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(8, getDeprecation());
}
if (isAggregate_ != false) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeBoolSize(16, isAggregate_);
}
if (isStateful_ != false) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeBoolSize(17, isStateful_);
}
if (isCommutative_ != false) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeBoolSize(18, isCommutative_);
}
if (allowsUninitializedInput_ != false) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeBoolSize(19, allowsUninitializedInput_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Structural equality over all fields, including unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.tensorflow.framework.OpDef)) {
return super.equals(obj);
}
org.tensorflow.framework.OpDef other = (org.tensorflow.framework.OpDef) obj;
if (!getName()
.equals(other.getName())) return false;
if (!getInputArgList()
.equals(other.getInputArgList())) return false;
if (!getOutputArgList()
.equals(other.getOutputArgList())) return false;
if (!getAttrList()
.equals(other.getAttrList())) return false;
// Optional message field: compare presence first, then value.
if (hasDeprecation() != other.hasDeprecation()) return false;
if (hasDeprecation()) {
if (!getDeprecation()
.equals(other.getDeprecation())) return false;
}
if (!getSummary()
.equals(other.getSummary())) return false;
if (!getDescription()
.equals(other.getDescription())) return false;
if (getIsCommutative()
!= other.getIsCommutative()) return false;
if (getIsAggregate()
!= other.getIsAggregate()) return false;
if (getIsStateful()
!= other.getIsStateful()) return false;
if (getAllowsUninitializedInput()
!= other.getAllowsUninitializedInput()) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
/**
 * Computes a hash code consistent with {@link #equals}: the same fields that
 * participate in equality are folded in here, using the generated-protobuf
 * convention of mixing the field number (37 * hash + FIELD_NUMBER) before
 * the field value (53 * hash + value hash). Memoized in
 * {@code memoizedHashCode}; 0 means "not yet computed".
 */
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode; // cached from a previous call
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + NAME_FIELD_NUMBER;
  hash = (53 * hash) + getName().hashCode();
  // Repeated fields only contribute when non-empty, so two messages with
  // empty lists hash identically regardless of list instance.
  if (getInputArgCount() > 0) {
    hash = (37 * hash) + INPUT_ARG_FIELD_NUMBER;
    hash = (53 * hash) + getInputArgList().hashCode();
  }
  if (getOutputArgCount() > 0) {
    hash = (37 * hash) + OUTPUT_ARG_FIELD_NUMBER;
    hash = (53 * hash) + getOutputArgList().hashCode();
  }
  if (getAttrCount() > 0) {
    hash = (37 * hash) + ATTR_FIELD_NUMBER;
    hash = (53 * hash) + getAttrList().hashCode();
  }
  // Optional message field: contributes only when present, matching equals().
  if (hasDeprecation()) {
    hash = (37 * hash) + DEPRECATION_FIELD_NUMBER;
    hash = (53 * hash) + getDeprecation().hashCode();
  }
  hash = (37 * hash) + SUMMARY_FIELD_NUMBER;
  hash = (53 * hash) + getSummary().hashCode();
  hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER;
  hash = (53 * hash) + getDescription().hashCode();
  hash = (37 * hash) + IS_COMMUTATIVE_FIELD_NUMBER;
  hash = (53 * hash) + org.nd4j.shade.protobuf.Internal.hashBoolean(
      getIsCommutative());
  hash = (37 * hash) + IS_AGGREGATE_FIELD_NUMBER;
  hash = (53 * hash) + org.nd4j.shade.protobuf.Internal.hashBoolean(
      getIsAggregate());
  hash = (37 * hash) + IS_STATEFUL_FIELD_NUMBER;
  hash = (53 * hash) + org.nd4j.shade.protobuf.Internal.hashBoolean(
      getIsStateful());
  hash = (37 * hash) + ALLOWS_UNINITIALIZED_INPUT_FIELD_NUMBER;
  hash = (53 * hash) + org.nd4j.shade.protobuf.Internal.hashBoolean(
      getAllowsUninitializedInput());
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
// -------------------------------------------------------------------------
// Static parsing entry points. Every overload delegates to PARSER. The
// in-memory overloads (ByteBuffer / ByteString / byte[]) throw
// InvalidProtocolBufferException on malformed input; the stream overloads
// additionally surface IOExceptions from the underlying stream. The
// "Delimited" variants read a varint length prefix before the message.
// -------------------------------------------------------------------------
public static org.tensorflow.framework.OpDef parseFrom(
    java.nio.ByteBuffer data)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.tensorflow.framework.OpDef parseFrom(
    java.nio.ByteBuffer data,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.framework.OpDef parseFrom(
    org.nd4j.shade.protobuf.ByteString data)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.tensorflow.framework.OpDef parseFrom(
    org.nd4j.shade.protobuf.ByteString data,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.framework.OpDef parseFrom(byte[] data)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.tensorflow.framework.OpDef parseFrom(
    byte[] data,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.framework.OpDef parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.tensorflow.framework.OpDef parseFrom(
    java.io.InputStream input,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.framework.OpDef parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static org.tensorflow.framework.OpDef parseDelimitedFrom(
    java.io.InputStream input,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.framework.OpDef parseFrom(
    org.nd4j.shade.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static org.tensorflow.framework.OpDef parseFrom(
    org.nd4j.shade.protobuf.CodedInputStream input,
    org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return org.nd4j.shade.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
/** Creates a fresh, empty builder for this message type. */
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
/** Creates a new, empty {@code OpDef} builder. */
public static Builder newBuilder() {
  return getDefaultInstance().toBuilder();
}
/** Creates a builder pre-populated with all fields of {@code prototype}. */
public static Builder newBuilder(org.tensorflow.framework.OpDef prototype) {
  Builder seeded = DEFAULT_INSTANCE.toBuilder();
  return seeded.mergeFrom(prototype);
}
/**
 * Returns a builder mirroring this message. The default instance maps to an
 * empty builder without a redundant merge.
 */
@java.lang.Override
public Builder toBuilder() {
  if (this == DEFAULT_INSTANCE) {
    return new Builder();
  }
  return new Builder().mergeFrom(this);
}
/** Creates a builder attached to {@code parent} for nested-builder support. */
@java.lang.Override
protected Builder newBuilderForType(
    org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
  return new Builder(parent);
}
/**
 * Resets {@code allows_uninitialized_input} to its proto3 default
 * ({@code false}).
 *
 * <pre>
 * By default, all inputs to an Op must be initialized Tensors. Ops
 * that may initialize tensors for the first time should set this
 * field to true, to allow the Op to take an uninitialized Tensor as
 * input.
 * </pre>
 *
 * <code>bool allows_uninitialized_input = 19;</code>
 */
public Builder clearAllowsUninitializedInput() {
  allowsUninitializedInput_ = false;
  onChanged(); // notify any parent builder that this builder was modified
  return this;
}
/**
 * Replaces this builder's unknown-field set wholesale; delegates to the
 * generated superclass.
 */
@java.lang.Override
public final Builder setUnknownFields(
    final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
/**
 * Merges the given unknown fields into this builder's existing set;
 * delegates to the generated superclass.
 */
@java.lang.Override
public final Builder mergeUnknownFields(
    final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:tensorflow.OpDef)
}
// @@protoc_insertion_point(class_scope:tensorflow.OpDef)
// Shared singleton carrying every field at its proto3 default value.
private static final org.tensorflow.framework.OpDef DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.tensorflow.framework.OpDef();
}
/** Returns the shared immutable default instance of {@code OpDef}. */
public static org.tensorflow.framework.OpDef getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
/**
 * Parser used by all {@code parseFrom}/{@code parseDelimitedFrom} overloads.
 * Declared as {@code Parser<OpDef>} rather than the raw {@code Parser}: the
 * raw type loses compile-time type safety and forces unchecked conversions
 * at every use site (and matches what protoc actually generates).
 */
private static final org.nd4j.shade.protobuf.Parser<OpDef>
    PARSER = new org.nd4j.shade.protobuf.AbstractParser<OpDef>() {
  @java.lang.Override
  public OpDef parsePartialFrom(
      org.nd4j.shade.protobuf.CodedInputStream input,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    return new OpDef(input, extensionRegistry);
  }
};
/**
 * Static accessor for the {@code OpDef} parser. Returns the parameterized
 * {@code Parser<OpDef>} (the raw return type here produced unchecked
 * warnings for callers).
 */
public static org.nd4j.shade.protobuf.Parser<OpDef> parser() {
  return PARSER;
}
/**
 * Returns the parser for this message type. Parameterized as
 * {@code Parser<OpDef>} (covariant with the generated base-class override;
 * the raw type lost that type information).
 */
@java.lang.Override
public org.nd4j.shade.protobuf.Parser<OpDef> getParserForType() {
  return PARSER;
}
/** Returns the shared default instance (all fields at proto3 defaults). */
@java.lang.Override
public org.tensorflow.framework.OpDef getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}