org.tensorflow.metadata.v0.NaturalLanguageStatistics
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: tensorflow_metadata/proto/v0/statistics.proto
// Protobuf Java Version: 3.25.4
package org.tensorflow.metadata.v0;
/**
*
* Statistics for a feature containing a NL domain.
*
*
* Protobuf type {@code tensorflow.metadata.v0.NaturalLanguageStatistics}
*/
public final class NaturalLanguageStatistics extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:tensorflow.metadata.v0.NaturalLanguageStatistics)
NaturalLanguageStatisticsOrBuilder {
private static final long serialVersionUID = 0L;
// Use NaturalLanguageStatistics.newBuilder() to construct.
private NaturalLanguageStatistics(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private NaturalLanguageStatistics() {
reportedSequences_ =
com.google.protobuf.LazyStringArrayList.emptyList();
tokenStatistics_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new NaturalLanguageStatistics();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.tensorflow.metadata.v0.Statistics.internal_static_tensorflow_metadata_v0_NaturalLanguageStatistics_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.tensorflow.metadata.v0.Statistics.internal_static_tensorflow_metadata_v0_NaturalLanguageStatistics_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.tensorflow.metadata.v0.NaturalLanguageStatistics.class, org.tensorflow.metadata.v0.NaturalLanguageStatistics.Builder.class);
}
public interface TokenStatisticsOrBuilder extends
// @@protoc_insertion_point(interface_extends:tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics)
com.google.protobuf.MessageOrBuilder {
/**
* string string_token = 1;
* @return Whether the stringToken field is set.
*/
boolean hasStringToken();
/**
* string string_token = 1;
* @return The stringToken.
*/
java.lang.String getStringToken();
/**
* string string_token = 1;
* @return The bytes for stringToken.
*/
com.google.protobuf.ByteString
getStringTokenBytes();
/**
* int64 int_token = 2;
* @return Whether the intToken field is set.
*/
boolean hasIntToken();
/**
* int64 int_token = 2;
* @return The intToken.
*/
long getIntToken();
/**
*
* The number of times the value occurs. Stored as a double to be able to
* handle weighted features.
*
*
* double frequency = 3;
* @return The frequency.
*/
double getFrequency();
/**
*
* Fraction of sequences containing the token.
*
*
* double fraction_of_sequences = 4;
* @return The fractionOfSequences.
*/
double getFractionOfSequences();
/**
*
* Min number of token occurrences within a sequence.
*
*
* double per_sequence_min_frequency = 5;
* @return The perSequenceMinFrequency.
*/
double getPerSequenceMinFrequency();
/**
*
* Average number of token occurrences within a sequence.
*
*
* double per_sequence_avg_frequency = 6;
* @return The perSequenceAvgFrequency.
*/
double getPerSequenceAvgFrequency();
/**
*
* Maximum number of token occurrences within a sequence.
*
*
* double per_sequence_max_frequency = 7;
* @return The perSequenceMaxFrequency.
*/
double getPerSequenceMaxFrequency();
/**
*
* Token positions within a sequence. Normalized by sequence length.
* (e.g. a token that occurs in position 0.5 occurs in the middle of
* a sequence).
*
*
* .tensorflow.metadata.v0.Histogram positions = 8;
* @return Whether the positions field is set.
*/
boolean hasPositions();
/**
*
* Token positions within a sequence. Normalized by sequence length.
* (e.g. a token that occurs in position 0.5 occurs in the middle of
* a sequence).
*
*
* .tensorflow.metadata.v0.Histogram positions = 8;
* @return The positions.
*/
org.tensorflow.metadata.v0.Histogram getPositions();
/**
*
* Token positions within a sequence. Normalized by sequence length.
* (e.g. a token that occurs in position 0.5 occurs in the middle of
* a sequence).
*
*
* .tensorflow.metadata.v0.Histogram positions = 8;
*/
org.tensorflow.metadata.v0.HistogramOrBuilder getPositionsOrBuilder();
org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics.TokenCase getTokenCase();
}
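// Illustrative sketch (editor's note, not part of the generated source): the
// string_token / int_token accessors above belong to a protobuf oneof, so at
// most one of them is set on any given TokenStatistics. Callers are expected
// to branch on getTokenCase() before calling the typed getter:
//
//   switch (tokenStats.getTokenCase()) {
//     case STRING_TOKEN:
//       System.out.println("token: " + tokenStats.getStringToken());
//       break;
//     case INT_TOKEN:
//       System.out.println("token id: " + tokenStats.getIntToken());
//       break;
//     case TOKEN_NOT_SET:
//       // neither variant was populated
//       break;
//   }
//
// (tokenStats is an assumed, previously obtained TokenStatistics instance.)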
/**
* Protobuf type {@code tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics}
*/
public static final class TokenStatistics extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics)
TokenStatisticsOrBuilder {
private static final long serialVersionUID = 0L;
// Use TokenStatistics.newBuilder() to construct.
private TokenStatistics(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private TokenStatistics() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new TokenStatistics();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.tensorflow.metadata.v0.Statistics.internal_static_tensorflow_metadata_v0_NaturalLanguageStatistics_TokenStatistics_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.tensorflow.metadata.v0.Statistics.internal_static_tensorflow_metadata_v0_NaturalLanguageStatistics_TokenStatistics_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics.class, org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics.Builder.class);
}
private int bitField0_;
private int tokenCase_ = 0;
@SuppressWarnings("serial")
private java.lang.Object token_;
public enum TokenCase
implements com.google.protobuf.Internal.EnumLite,
com.google.protobuf.AbstractMessage.InternalOneOfEnum {
STRING_TOKEN(1),
INT_TOKEN(2),
TOKEN_NOT_SET(0);
private final int value;
private TokenCase(int value) {
this.value = value;
}
/**
* @param value The number of the enum to look for.
* @return The enum associated with the given number.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static TokenCase valueOf(int value) {
return forNumber(value);
}
public static TokenCase forNumber(int value) {
switch (value) {
case 1: return STRING_TOKEN;
case 2: return INT_TOKEN;
case 0: return TOKEN_NOT_SET;
default: return null;
}
}
public int getNumber() {
return this.value;
}
};
public TokenCase
getTokenCase() {
return TokenCase.forNumber(
tokenCase_);
}
public static final int STRING_TOKEN_FIELD_NUMBER = 1;
/**
* string string_token = 1;
* @return Whether the stringToken field is set.
*/
public boolean hasStringToken() {
return tokenCase_ == 1;
}
/**
* string string_token = 1;
* @return The stringToken.
*/
public java.lang.String getStringToken() {
java.lang.Object ref = "";
if (tokenCase_ == 1) {
ref = token_;
}
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (tokenCase_ == 1) {
token_ = s;
}
return s;
}
}
/**
* string string_token = 1;
* @return The bytes for stringToken.
*/
public com.google.protobuf.ByteString
getStringTokenBytes() {
java.lang.Object ref = "";
if (tokenCase_ == 1) {
ref = token_;
}
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
if (tokenCase_ == 1) {
token_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int INT_TOKEN_FIELD_NUMBER = 2;
/**
* int64 int_token = 2;
* @return Whether the intToken field is set.
*/
@java.lang.Override
public boolean hasIntToken() {
return tokenCase_ == 2;
}
/**
* int64 int_token = 2;
* @return The intToken.
*/
@java.lang.Override
public long getIntToken() {
if (tokenCase_ == 2) {
return (java.lang.Long) token_;
}
return 0L;
}
public static final int FREQUENCY_FIELD_NUMBER = 3;
private double frequency_ = 0D;
/**
*
* The number of times the value occurs. Stored as a double to be able to
* handle weighted features.
*
*
* double frequency = 3;
* @return The frequency.
*/
@java.lang.Override
public double getFrequency() {
return frequency_;
}
public static final int FRACTION_OF_SEQUENCES_FIELD_NUMBER = 4;
private double fractionOfSequences_ = 0D;
/**
*
* Fraction of sequences containing the token.
*
*
* double fraction_of_sequences = 4;
* @return The fractionOfSequences.
*/
@java.lang.Override
public double getFractionOfSequences() {
return fractionOfSequences_;
}
public static final int PER_SEQUENCE_MIN_FREQUENCY_FIELD_NUMBER = 5;
private double perSequenceMinFrequency_ = 0D;
/**
*
* Min number of token occurrences within a sequence.
*
*
* double per_sequence_min_frequency = 5;
* @return The perSequenceMinFrequency.
*/
@java.lang.Override
public double getPerSequenceMinFrequency() {
return perSequenceMinFrequency_;
}
public static final int PER_SEQUENCE_AVG_FREQUENCY_FIELD_NUMBER = 6;
private double perSequenceAvgFrequency_ = 0D;
/**
*
* Average number of token occurrences within a sequence.
*
*
* double per_sequence_avg_frequency = 6;
* @return The perSequenceAvgFrequency.
*/
@java.lang.Override
public double getPerSequenceAvgFrequency() {
return perSequenceAvgFrequency_;
}
public static final int PER_SEQUENCE_MAX_FREQUENCY_FIELD_NUMBER = 7;
private double perSequenceMaxFrequency_ = 0D;
/**
*
* Maximum number of token occurrences within a sequence.
*
*
* double per_sequence_max_frequency = 7;
* @return The perSequenceMaxFrequency.
*/
@java.lang.Override
public double getPerSequenceMaxFrequency() {
return perSequenceMaxFrequency_;
}
public static final int POSITIONS_FIELD_NUMBER = 8;
private org.tensorflow.metadata.v0.Histogram positions_;
/**
*
* Token positions within a sequence. Normalized by sequence length.
* (e.g. a token that occurs in position 0.5 occurs in the middle of
* a sequence).
*
*
* .tensorflow.metadata.v0.Histogram positions = 8;
* @return Whether the positions field is set.
*/
@java.lang.Override
public boolean hasPositions() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
* Token positions within a sequence. Normalized by sequence length.
* (e.g. a token that occurs in position 0.5 occurs in the middle of
* a sequence).
*
*
* .tensorflow.metadata.v0.Histogram positions = 8;
* @return The positions.
*/
@java.lang.Override
public org.tensorflow.metadata.v0.Histogram getPositions() {
return positions_ == null ? org.tensorflow.metadata.v0.Histogram.getDefaultInstance() : positions_;
}
/**
*
* Token positions within a sequence. Normalized by sequence length.
* (e.g. a token that occurs in position 0.5 occurs in the middle of
* a sequence).
*
*
* .tensorflow.metadata.v0.Histogram positions = 8;
*/
@java.lang.Override
public org.tensorflow.metadata.v0.HistogramOrBuilder getPositionsOrBuilder() {
return positions_ == null ? org.tensorflow.metadata.v0.Histogram.getDefaultInstance() : positions_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (tokenCase_ == 1) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, token_);
}
if (tokenCase_ == 2) {
output.writeInt64(
2, (long)((java.lang.Long) token_));
}
if (java.lang.Double.doubleToRawLongBits(frequency_) != 0) {
output.writeDouble(3, frequency_);
}
if (java.lang.Double.doubleToRawLongBits(fractionOfSequences_) != 0) {
output.writeDouble(4, fractionOfSequences_);
}
if (java.lang.Double.doubleToRawLongBits(perSequenceMinFrequency_) != 0) {
output.writeDouble(5, perSequenceMinFrequency_);
}
if (java.lang.Double.doubleToRawLongBits(perSequenceAvgFrequency_) != 0) {
output.writeDouble(6, perSequenceAvgFrequency_);
}
if (java.lang.Double.doubleToRawLongBits(perSequenceMaxFrequency_) != 0) {
output.writeDouble(7, perSequenceMaxFrequency_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(8, getPositions());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (tokenCase_ == 1) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, token_);
}
if (tokenCase_ == 2) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(
2, (long)((java.lang.Long) token_));
}
if (java.lang.Double.doubleToRawLongBits(frequency_) != 0) {
size += com.google.protobuf.CodedOutputStream
.computeDoubleSize(3, frequency_);
}
if (java.lang.Double.doubleToRawLongBits(fractionOfSequences_) != 0) {
size += com.google.protobuf.CodedOutputStream
.computeDoubleSize(4, fractionOfSequences_);
}
if (java.lang.Double.doubleToRawLongBits(perSequenceMinFrequency_) != 0) {
size += com.google.protobuf.CodedOutputStream
.computeDoubleSize(5, perSequenceMinFrequency_);
}
if (java.lang.Double.doubleToRawLongBits(perSequenceAvgFrequency_) != 0) {
size += com.google.protobuf.CodedOutputStream
.computeDoubleSize(6, perSequenceAvgFrequency_);
}
if (java.lang.Double.doubleToRawLongBits(perSequenceMaxFrequency_) != 0) {
size += com.google.protobuf.CodedOutputStream
.computeDoubleSize(7, perSequenceMaxFrequency_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(8, getPositions());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
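// Illustrative sketch (editor's note, not generated code): writeTo() above and
// the static parseFrom() overloads further below give the usual protobuf wire
// round trip; toByteArray() is inherited from the protobuf message base class.
//
//   byte[] wire = tokenStats.toByteArray();
//   NaturalLanguageStatistics.TokenStatistics copy =
//       NaturalLanguageStatistics.TokenStatistics.parseFrom(wire);
//   assert copy.equals(tokenStats);
//
// (tokenStats is an assumed, previously built TokenStatistics instance;
// parseFrom(byte[]) throws InvalidProtocolBufferException on malformed input.)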
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics)) {
return super.equals(obj);
}
org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics other = (org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics) obj;
if (java.lang.Double.doubleToLongBits(getFrequency())
!= java.lang.Double.doubleToLongBits(
other.getFrequency())) return false;
if (java.lang.Double.doubleToLongBits(getFractionOfSequences())
!= java.lang.Double.doubleToLongBits(
other.getFractionOfSequences())) return false;
if (java.lang.Double.doubleToLongBits(getPerSequenceMinFrequency())
!= java.lang.Double.doubleToLongBits(
other.getPerSequenceMinFrequency())) return false;
if (java.lang.Double.doubleToLongBits(getPerSequenceAvgFrequency())
!= java.lang.Double.doubleToLongBits(
other.getPerSequenceAvgFrequency())) return false;
if (java.lang.Double.doubleToLongBits(getPerSequenceMaxFrequency())
!= java.lang.Double.doubleToLongBits(
other.getPerSequenceMaxFrequency())) return false;
if (hasPositions() != other.hasPositions()) return false;
if (hasPositions()) {
if (!getPositions()
.equals(other.getPositions())) return false;
}
if (!getTokenCase().equals(other.getTokenCase())) return false;
switch (tokenCase_) {
case 1:
if (!getStringToken()
.equals(other.getStringToken())) return false;
break;
case 2:
if (getIntToken()
!= other.getIntToken()) return false;
break;
case 0:
default:
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + FREQUENCY_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
java.lang.Double.doubleToLongBits(getFrequency()));
hash = (37 * hash) + FRACTION_OF_SEQUENCES_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
java.lang.Double.doubleToLongBits(getFractionOfSequences()));
hash = (37 * hash) + PER_SEQUENCE_MIN_FREQUENCY_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
java.lang.Double.doubleToLongBits(getPerSequenceMinFrequency()));
hash = (37 * hash) + PER_SEQUENCE_AVG_FREQUENCY_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
java.lang.Double.doubleToLongBits(getPerSequenceAvgFrequency()));
hash = (37 * hash) + PER_SEQUENCE_MAX_FREQUENCY_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
java.lang.Double.doubleToLongBits(getPerSequenceMaxFrequency()));
if (hasPositions()) {
hash = (37 * hash) + POSITIONS_FIELD_NUMBER;
hash = (53 * hash) + getPositions().hashCode();
}
switch (tokenCase_) {
case 1:
hash = (37 * hash) + STRING_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getStringToken().hashCode();
break;
case 2:
hash = (37 * hash) + INT_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
getIntToken());
break;
case 0:
default:
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics)
org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatisticsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.tensorflow.metadata.v0.Statistics.internal_static_tensorflow_metadata_v0_NaturalLanguageStatistics_TokenStatistics_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.tensorflow.metadata.v0.Statistics.internal_static_tensorflow_metadata_v0_NaturalLanguageStatistics_TokenStatistics_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics.class, org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics.Builder.class);
}
// Construct using org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getPositionsFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
frequency_ = 0D;
fractionOfSequences_ = 0D;
perSequenceMinFrequency_ = 0D;
perSequenceAvgFrequency_ = 0D;
perSequenceMaxFrequency_ = 0D;
positions_ = null;
if (positionsBuilder_ != null) {
positionsBuilder_.dispose();
positionsBuilder_ = null;
}
tokenCase_ = 0;
token_ = null;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.tensorflow.metadata.v0.Statistics.internal_static_tensorflow_metadata_v0_NaturalLanguageStatistics_TokenStatistics_descriptor;
}
@java.lang.Override
public org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics getDefaultInstanceForType() {
return org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics.getDefaultInstance();
}
@java.lang.Override
public org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics build() {
org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics buildPartial() {
org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics result = new org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics(this);
if (bitField0_ != 0) { buildPartial0(result); }
buildPartialOneofs(result);
onBuilt();
return result;
}
private void buildPartial0(org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.frequency_ = frequency_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.fractionOfSequences_ = fractionOfSequences_;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.perSequenceMinFrequency_ = perSequenceMinFrequency_;
}
if (((from_bitField0_ & 0x00000020) != 0)) {
result.perSequenceAvgFrequency_ = perSequenceAvgFrequency_;
}
if (((from_bitField0_ & 0x00000040) != 0)) {
result.perSequenceMaxFrequency_ = perSequenceMaxFrequency_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000080) != 0)) {
result.positions_ = positionsBuilder_ == null
? positions_
: positionsBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
private void buildPartialOneofs(org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics result) {
result.tokenCase_ = tokenCase_;
result.token_ = this.token_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics) {
return mergeFrom((org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics other) {
if (other == org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics.getDefaultInstance()) return this;
if (other.getFrequency() != 0D) {
setFrequency(other.getFrequency());
}
if (other.getFractionOfSequences() != 0D) {
setFractionOfSequences(other.getFractionOfSequences());
}
if (other.getPerSequenceMinFrequency() != 0D) {
setPerSequenceMinFrequency(other.getPerSequenceMinFrequency());
}
if (other.getPerSequenceAvgFrequency() != 0D) {
setPerSequenceAvgFrequency(other.getPerSequenceAvgFrequency());
}
if (other.getPerSequenceMaxFrequency() != 0D) {
setPerSequenceMaxFrequency(other.getPerSequenceMaxFrequency());
}
if (other.hasPositions()) {
mergePositions(other.getPositions());
}
switch (other.getTokenCase()) {
case STRING_TOKEN: {
tokenCase_ = 1;
token_ = other.token_;
onChanged();
break;
}
case INT_TOKEN: {
setIntToken(other.getIntToken());
break;
}
case TOKEN_NOT_SET: {
break;
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
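// Illustrative sketch (editor's note, not generated code): mergeFrom above
// copies only fields that carry a non-default value on the argument (and
// whichever token oneof case is set), so it can layer a partial update over
// an existing message:
//
//   TokenStatistics updated = existing.toBuilder()
//       .mergeFrom(partialUpdate)   // non-default fields of partialUpdate win
//       .build();
//
// (existing and partialUpdate are assumed TokenStatistics instances.)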
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
java.lang.String s = input.readStringRequireUtf8();
tokenCase_ = 1;
token_ = s;
break;
} // case 10
case 16: {
token_ = input.readInt64();
tokenCase_ = 2;
break;
} // case 16
case 25: {
frequency_ = input.readDouble();
bitField0_ |= 0x00000004;
break;
} // case 25
case 33: {
fractionOfSequences_ = input.readDouble();
bitField0_ |= 0x00000008;
break;
} // case 33
case 41: {
perSequenceMinFrequency_ = input.readDouble();
bitField0_ |= 0x00000010;
break;
} // case 41
case 49: {
perSequenceAvgFrequency_ = input.readDouble();
bitField0_ |= 0x00000020;
break;
} // case 49
case 57: {
perSequenceMaxFrequency_ = input.readDouble();
bitField0_ |= 0x00000040;
break;
} // case 57
case 66: {
input.readMessage(
getPositionsFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000080;
break;
} // case 66
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int tokenCase_ = 0;
private java.lang.Object token_;
public TokenCase
getTokenCase() {
return TokenCase.forNumber(
tokenCase_);
}
public Builder clearToken() {
tokenCase_ = 0;
token_ = null;
onChanged();
return this;
}
private int bitField0_;
/**
* string string_token = 1;
* @return Whether the stringToken field is set.
*/
@java.lang.Override
public boolean hasStringToken() {
return tokenCase_ == 1;
}
/**
* string string_token = 1;
* @return The stringToken.
*/
@java.lang.Override
public java.lang.String getStringToken() {
java.lang.Object ref = "";
if (tokenCase_ == 1) {
ref = token_;
}
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (tokenCase_ == 1) {
token_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* string string_token = 1;
* @return The bytes for stringToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getStringTokenBytes() {
java.lang.Object ref = "";
if (tokenCase_ == 1) {
ref = token_;
}
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
if (tokenCase_ == 1) {
token_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* string string_token = 1;
* @param value The stringToken to set.
* @return This builder for chaining.
*/
public Builder setStringToken(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
tokenCase_ = 1;
token_ = value;
onChanged();
return this;
}
/**
* string string_token = 1;
* @return This builder for chaining.
*/
public Builder clearStringToken() {
if (tokenCase_ == 1) {
tokenCase_ = 0;
token_ = null;
onChanged();
}
return this;
}
/**
* string string_token = 1;
* @param value The bytes for stringToken to set.
* @return This builder for chaining.
*/
public Builder setStringTokenBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
tokenCase_ = 1;
token_ = value;
onChanged();
return this;
}
/**
* int64 int_token = 2;
* @return Whether the intToken field is set.
*/
public boolean hasIntToken() {
return tokenCase_ == 2;
}
/**
* int64 int_token = 2;
* @return The intToken.
*/
public long getIntToken() {
if (tokenCase_ == 2) {
return (java.lang.Long) token_;
}
return 0L;
}
/**
* int64 int_token = 2;
* @param value The intToken to set.
* @return This builder for chaining.
*/
public Builder setIntToken(long value) {
tokenCase_ = 2;
token_ = value;
onChanged();
return this;
}
/**
* int64 int_token = 2;
* @return This builder for chaining.
*/
public Builder clearIntToken() {
if (tokenCase_ == 2) {
tokenCase_ = 0;
token_ = null;
onChanged();
}
return this;
}
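// Illustrative sketch (editor's note, not generated code): because
// string_token and int_token share the token oneof, setting one variant
// implicitly replaces the other:
//
//   TokenStatistics.Builder b = TokenStatistics.newBuilder();
//   b.setStringToken("hello");
//   b.setIntToken(42);            // replaces the string variant
//   b.hasStringToken();           // -> false
//   b.getTokenCase();             // -> TokenCase.INT_TOKEN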
private double frequency_ ;
/**
*
* The number of times the value occurs. Stored as a double to be able to
* handle weighted features.
*
*
* double frequency = 3;
* @return The frequency.
*/
@java.lang.Override
public double getFrequency() {
return frequency_;
}
/**
*
* The number of times the value occurs. Stored as a double to be able to
* handle weighted features.
*
*
* double frequency = 3;
* @param value The frequency to set.
* @return This builder for chaining.
*/
public Builder setFrequency(double value) {
frequency_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
* The number of times the value occurs. Stored as a double to be able to
* handle weighted features.
*
*
* double frequency = 3;
* @return This builder for chaining.
*/
public Builder clearFrequency() {
bitField0_ = (bitField0_ & ~0x00000004);
frequency_ = 0D;
onChanged();
return this;
}
private double fractionOfSequences_ ;
/**
*
* Fraction of sequences containing the token.
*
*
* double fraction_of_sequences = 4;
* @return The fractionOfSequences.
*/
@java.lang.Override
public double getFractionOfSequences() {
return fractionOfSequences_;
}
/**
*
* Fraction of sequences containing the token.
*
*
* double fraction_of_sequences = 4;
* @param value The fractionOfSequences to set.
* @return This builder for chaining.
*/
public Builder setFractionOfSequences(double value) {
fractionOfSequences_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
* Fraction of sequences containing the token.
*
*
* double fraction_of_sequences = 4;
* @return This builder for chaining.
*/
public Builder clearFractionOfSequences() {
bitField0_ = (bitField0_ & ~0x00000008);
fractionOfSequences_ = 0D;
onChanged();
return this;
}
private double perSequenceMinFrequency_ ;
/**
*
* Min number of token occurrences within a sequence.
*
*
* double per_sequence_min_frequency = 5;
* @return The perSequenceMinFrequency.
*/
@java.lang.Override
public double getPerSequenceMinFrequency() {
return perSequenceMinFrequency_;
}
/**
*
* Min number of token occurrences within a sequence.
*
*
* double per_sequence_min_frequency = 5;
* @param value The perSequenceMinFrequency to set.
* @return This builder for chaining.
*/
public Builder setPerSequenceMinFrequency(double value) {
perSequenceMinFrequency_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
*
* Min number of token occurrences within a sequence.
*
*
* double per_sequence_min_frequency = 5;
* @return This builder for chaining.
*/
public Builder clearPerSequenceMinFrequency() {
bitField0_ = (bitField0_ & ~0x00000010);
perSequenceMinFrequency_ = 0D;
onChanged();
return this;
}
private double perSequenceAvgFrequency_ ;
/**
*
* Average number of token occurrences within a sequence.
*
*
* double per_sequence_avg_frequency = 6;
* @return The perSequenceAvgFrequency.
*/
@java.lang.Override
public double getPerSequenceAvgFrequency() {
return perSequenceAvgFrequency_;
}
/**
*
* Average number of token occurrences within a sequence.
*
*
* double per_sequence_avg_frequency = 6;
* @param value The perSequenceAvgFrequency to set.
* @return This builder for chaining.
*/
public Builder setPerSequenceAvgFrequency(double value) {
perSequenceAvgFrequency_ = value;
bitField0_ |= 0x00000020;
onChanged();
return this;
}
/**
*
* Average number of token occurrences within a sequence.
*
*
* double per_sequence_avg_frequency = 6;
* @return This builder for chaining.
*/
public Builder clearPerSequenceAvgFrequency() {
bitField0_ = (bitField0_ & ~0x00000020);
perSequenceAvgFrequency_ = 0D;
onChanged();
return this;
}
private double perSequenceMaxFrequency_ ;
/**
*
* Maximum number of token occurrences within a sequence.
*
*
* double per_sequence_max_frequency = 7;
* @return The perSequenceMaxFrequency.
*/
@java.lang.Override
public double getPerSequenceMaxFrequency() {
return perSequenceMaxFrequency_;
}
/**
*
* Maximum number of token occurrences within a sequence.
*
*
* double per_sequence_max_frequency = 7;
* @param value The perSequenceMaxFrequency to set.
* @return This builder for chaining.
*/
public Builder setPerSequenceMaxFrequency(double value) {
perSequenceMaxFrequency_ = value;
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
*
* Maximum number of token occurrences within a sequence.
*
*
* double per_sequence_max_frequency = 7;
* @return This builder for chaining.
*/
public Builder clearPerSequenceMaxFrequency() {
bitField0_ = (bitField0_ & ~0x00000040);
perSequenceMaxFrequency_ = 0D;
onChanged();
return this;
}
private org.tensorflow.metadata.v0.Histogram positions_;
private com.google.protobuf.SingleFieldBuilderV3<
org.tensorflow.metadata.v0.Histogram, org.tensorflow.metadata.v0.Histogram.Builder, org.tensorflow.metadata.v0.HistogramOrBuilder> positionsBuilder_;
/**
*
* Token positions within a sequence. Normalized by sequence length.
* (e.g. a token that occurs in position 0.5 occurs in the middle of
* a sequence).
*
*
* .tensorflow.metadata.v0.Histogram positions = 8;
* @return Whether the positions field is set.
*/
public boolean hasPositions() {
return ((bitField0_ & 0x00000080) != 0);
}
/**
*
* Token positions within a sequence. Normalized by sequence length.
* (e.g. a token that occurs in position 0.5 occurs in the middle of
* a sequence).
*
*
* .tensorflow.metadata.v0.Histogram positions = 8;
* @return The positions.
*/
public org.tensorflow.metadata.v0.Histogram getPositions() {
if (positionsBuilder_ == null) {
return positions_ == null ? org.tensorflow.metadata.v0.Histogram.getDefaultInstance() : positions_;
} else {
return positionsBuilder_.getMessage();
}
}
/**
*
* Token positions within a sequence. Normalized by sequence length.
* (e.g. a token that occurs in position 0.5 occurs in the middle of
* a sequence).
*
*
* .tensorflow.metadata.v0.Histogram positions = 8;
*/
public Builder setPositions(org.tensorflow.metadata.v0.Histogram value) {
if (positionsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
positions_ = value;
} else {
positionsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000080;
onChanged();
return this;
}
/**
*
* Token positions within a sequence. Normalized by sequence length.
* (e.g. a token that occurs in position 0.5 occurs in the middle of
* a sequence).
*
*
* .tensorflow.metadata.v0.Histogram positions = 8;
*/
public Builder setPositions(
org.tensorflow.metadata.v0.Histogram.Builder builderForValue) {
if (positionsBuilder_ == null) {
positions_ = builderForValue.build();
} else {
positionsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000080;
onChanged();
return this;
}
/**
*
* Token positions within a sequence. Normalized by sequence length.
* (e.g. a token that occurs in position 0.5 occurs in the middle of
* a sequence).
*
*
* .tensorflow.metadata.v0.Histogram positions = 8;
*/
public Builder mergePositions(org.tensorflow.metadata.v0.Histogram value) {
if (positionsBuilder_ == null) {
if (((bitField0_ & 0x00000080) != 0) &&
positions_ != null &&
positions_ != org.tensorflow.metadata.v0.Histogram.getDefaultInstance()) {
getPositionsBuilder().mergeFrom(value);
} else {
positions_ = value;
}
} else {
positionsBuilder_.mergeFrom(value);
}
if (positions_ != null) {
bitField0_ |= 0x00000080;
onChanged();
}
return this;
}
/**
*
* Token positions within a sequence. Normalized by sequence length.
* (e.g. a token that occurs in position 0.5 occurs in the middle of
* a sequence).
*
*
* .tensorflow.metadata.v0.Histogram positions = 8;
*/
public Builder clearPositions() {
bitField0_ = (bitField0_ & ~0x00000080);
positions_ = null;
if (positionsBuilder_ != null) {
positionsBuilder_.dispose();
positionsBuilder_ = null;
}
onChanged();
return this;
}
/**
*
* Token positions within a sequence. Normalized by sequence length.
* (e.g. a token that occurs in position 0.5 occurs in the middle of
* a sequence).
*
*
* .tensorflow.metadata.v0.Histogram positions = 8;
*/
public org.tensorflow.metadata.v0.Histogram.Builder getPositionsBuilder() {
bitField0_ |= 0x00000080;
onChanged();
return getPositionsFieldBuilder().getBuilder();
}
/**
*
* Token positions within a sequence. Normalized by sequence length.
* (e.g. a token that occurs in position 0.5 occurs in the middle of
* a sequence).
*
*
* .tensorflow.metadata.v0.Histogram positions = 8;
*/
public org.tensorflow.metadata.v0.HistogramOrBuilder getPositionsOrBuilder() {
if (positionsBuilder_ != null) {
return positionsBuilder_.getMessageOrBuilder();
} else {
return positions_ == null ?
org.tensorflow.metadata.v0.Histogram.getDefaultInstance() : positions_;
}
}
/**
*
* Token positions within a sequence. Normalized by sequence length.
* (e.g. a token that occurs in position 0.5 occurs in the middle of
* a sequence).
*
*
* .tensorflow.metadata.v0.Histogram positions = 8;
*/
private com.google.protobuf.SingleFieldBuilderV3<
org.tensorflow.metadata.v0.Histogram, org.tensorflow.metadata.v0.Histogram.Builder, org.tensorflow.metadata.v0.HistogramOrBuilder>
getPositionsFieldBuilder() {
if (positionsBuilder_ == null) {
positionsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
org.tensorflow.metadata.v0.Histogram, org.tensorflow.metadata.v0.Histogram.Builder, org.tensorflow.metadata.v0.HistogramOrBuilder>(
getPositions(),
getParentForChildren(),
isClean());
positions_ = null;
}
return positionsBuilder_;
}
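// Illustrative sketch (editor's note, not generated code): the positions
// histogram can be supplied either as a finished Histogram message via
// setPositions(...), or populated in place through the nested builder:
//
//   TokenStatistics.Builder b = TokenStatistics.newBuilder();
//   org.tensorflow.metadata.v0.Histogram.Builder hist = b.getPositionsBuilder();
//   // ... fill in the histogram via the Histogram.Builder API ...
//   b.hasPositions();   // -> true once the nested builder has been obtained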
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics)
}
// @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics)
private static final org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics();
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<TokenStatistics>
PARSER = new com.google.protobuf.AbstractParser<TokenStatistics>() {
@java.lang.Override
public TokenStatistics parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<TokenStatistics> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<TokenStatistics> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
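// Illustrative sketch (editor's note, not part of the generated source):
// building a TokenStatistics entry with the Builder defined above. The
// numeric values are made up for the example.
//
//   NaturalLanguageStatistics.TokenStatistics ts =
//       NaturalLanguageStatistics.TokenStatistics.newBuilder()
//           .setStringToken("[UNK]")
//           .setFrequency(128.0)
//           .setFractionOfSequences(0.07)
//           .setPerSequenceAvgFrequency(1.3)
//           .build();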
private int bitField0_;
public static final int FEATURE_COVERAGE_FIELD_NUMBER = 1;
private double featureCoverage_ = 0D;
/**
*
* Fraction of feature input tokens considered in-vocab.
*
*
* double feature_coverage = 1;
* @return The featureCoverage.
*/
@java.lang.Override
public double getFeatureCoverage() {
return featureCoverage_;
}
public static final int AVG_TOKEN_LENGTH_FIELD_NUMBER = 2;
private double avgTokenLength_ = 0D;
/**
*
* Average token length of tokens used by the feature.
*
*
* double avg_token_length = 2;
* @return The avgTokenLength.
*/
@java.lang.Override
public double getAvgTokenLength() {
return avgTokenLength_;
}
public static final int TOKEN_LENGTH_HISTOGRAM_FIELD_NUMBER = 3;
private org.tensorflow.metadata.v0.Histogram tokenLengthHistogram_;
/**
*
* Histogram containing the distribution of token lengths.
*
*
* .tensorflow.metadata.v0.Histogram token_length_histogram = 3;
* @return Whether the tokenLengthHistogram field is set.
*/
@java.lang.Override
public boolean hasTokenLengthHistogram() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
* Histogram containing the distribution of token lengths.
*
*
* .tensorflow.metadata.v0.Histogram token_length_histogram = 3;
* @return The tokenLengthHistogram.
*/
@java.lang.Override
public org.tensorflow.metadata.v0.Histogram getTokenLengthHistogram() {
return tokenLengthHistogram_ == null ? org.tensorflow.metadata.v0.Histogram.getDefaultInstance() : tokenLengthHistogram_;
}
/**
*
* Histogram containing the distribution of token lengths.
*
*
* .tensorflow.metadata.v0.Histogram token_length_histogram = 3;
*/
@java.lang.Override
public org.tensorflow.metadata.v0.HistogramOrBuilder getTokenLengthHistogramOrBuilder() {
return tokenLengthHistogram_ == null ? org.tensorflow.metadata.v0.Histogram.getDefaultInstance() : tokenLengthHistogram_;
}
public static final int MIN_SEQUENCE_LENGTH_FIELD_NUMBER = 10;
private long minSequenceLength_ = 0L;
/**
*
* Min / max sequence lengths.
*
*
* int64 min_sequence_length = 10;
* @return The minSequenceLength.
*/
@java.lang.Override
public long getMinSequenceLength() {
return minSequenceLength_;
}
public static final int MAX_SEQUENCE_LENGTH_FIELD_NUMBER = 11;
private long maxSequenceLength_ = 0L;
/**
* int64 max_sequence_length = 11;
* @return The maxSequenceLength.
*/
@java.lang.Override
public long getMaxSequenceLength() {
return maxSequenceLength_;
}
public static final int SEQUENCE_LENGTH_HISTOGRAM_FIELD_NUMBER = 9;
private org.tensorflow.metadata.v0.Histogram sequenceLengthHistogram_;
/**
*
* Histogram containing the distribution of sequence lengths.
*
*
* .tensorflow.metadata.v0.Histogram sequence_length_histogram = 9;
* @return Whether the sequenceLengthHistogram field is set.
*/
@java.lang.Override
public boolean hasSequenceLengthHistogram() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
* Histogram containing the distribution of sequence lengths.
*
*
* .tensorflow.metadata.v0.Histogram sequence_length_histogram = 9;
* @return The sequenceLengthHistogram.
*/
@java.lang.Override
public org.tensorflow.metadata.v0.Histogram getSequenceLengthHistogram() {
return sequenceLengthHistogram_ == null ? org.tensorflow.metadata.v0.Histogram.getDefaultInstance() : sequenceLengthHistogram_;
}
/**
*
* Histogram containing the distribution of sequence lengths.
*
*
* .tensorflow.metadata.v0.Histogram sequence_length_histogram = 9;
*/
@java.lang.Override
public org.tensorflow.metadata.v0.HistogramOrBuilder getSequenceLengthHistogramOrBuilder() {
return sequenceLengthHistogram_ == null ? org.tensorflow.metadata.v0.Histogram.getDefaultInstance() : sequenceLengthHistogram_;
}
public static final int LOCATION_MISSES_FIELD_NUMBER = 4;
private long locationMisses_ = 0L;
/**
*
* Number of sequences which do not match the location constraint.
*
*
* int64 location_misses = 4;
* @return The locationMisses.
*/
@java.lang.Override
public long getLocationMisses() {
return locationMisses_;
}
public static final int REPORTED_SEQUENCES_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList reportedSequences_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
*
* Reported sequences that are sampled from the input and have small
* avg_token_length, low feature coverage, or do not match the location
* regex.
*
*
* repeated string reported_sequences = 5;
* @return A list containing the reportedSequences.
*/
public com.google.protobuf.ProtocolStringList
getReportedSequencesList() {
return reportedSequences_;
}
/**
*
* Reported sequences that are sampled from the input and have small
* avg_token_length, low feature coverage, or do not match the location
* regex.
*
*
* repeated string reported_sequences = 5;
* @return The count of reportedSequences.
*/
public int getReportedSequencesCount() {
return reportedSequences_.size();
}
/**
*
* Reported sequences that are sampled from the input and have small
* avg_token_length, low feature coverage, or do not match the location
* regex.
*
*
* repeated string reported_sequences = 5;
* @param index The index of the element to return.
* @return The reportedSequences at the given index.
*/
public java.lang.String getReportedSequences(int index) {
return reportedSequences_.get(index);
}
/**
*
* Reported sequences that are sampled from the input and have small
* avg_token_length, low feature coverage, or do not match the location
* regex.
*
*
* repeated string reported_sequences = 5;
* @param index The index of the value to return.
* @return The bytes of the reportedSequences at the given index.
*/
public com.google.protobuf.ByteString
getReportedSequencesBytes(int index) {
return reportedSequences_.getByteString(index);
}
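// Illustrative sketch (editor's note, not generated code): reported_sequences
// is a repeated string field, so it is read as a list:
//
//   for (String sequence : nlStats.getReportedSequencesList()) {
//     System.out.println("flagged sequence: " + sequence);
//   }
//
// (nlStats is an assumed, previously obtained NaturalLanguageStatistics instance.)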
public static final int TOKEN_STATISTICS_FIELD_NUMBER = 6;
@SuppressWarnings("serial")
private java.util.List<org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics> tokenStatistics_;
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
@java.lang.Override
public java.util.List<org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics> getTokenStatisticsList() {
return tokenStatistics_;
}
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
@java.lang.Override
public java.util.List<? extends org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatisticsOrBuilder>
getTokenStatisticsOrBuilderList() {
return tokenStatistics_;
}
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
@java.lang.Override
public int getTokenStatisticsCount() {
return tokenStatistics_.size();
}
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
@java.lang.Override
public org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics getTokenStatistics(int index) {
return tokenStatistics_.get(index);
}
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
@java.lang.Override
public org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatisticsOrBuilder getTokenStatisticsOrBuilder(
int index) {
return tokenStatistics_.get(index);
}
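// Illustrative sketch (editor's note, not generated code): per-token
// statistics are exposed as a repeated message field, so reading them is a
// straightforward iteration over the list accessor defined above:
//
//   for (NaturalLanguageStatistics.TokenStatistics ts : nlStats.getTokenStatisticsList()) {
//     double frequency = ts.getFrequency();
//     double fraction = ts.getFractionOfSequences();
//     // ... inspect ts.getTokenCase() to recover the token itself ...
//   }
//
// (nlStats is an assumed, previously obtained NaturalLanguageStatistics instance.)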
public static final int RANK_HISTOGRAM_FIELD_NUMBER = 7;
private org.tensorflow.metadata.v0.RankHistogram rankHistogram_;
/**
*
* The rank histogram for the tokens of the feature.
* The rank is used to measure how commonly the token is found in the
* dataset. The most common token would have a rank of 1, with the second-most
* common value having a rank of 2, and so on.
*
*
* .tensorflow.metadata.v0.RankHistogram rank_histogram = 7;
* @return Whether the rankHistogram field is set.
*/
@java.lang.Override
public boolean hasRankHistogram() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
* The rank histogram for the tokens of the feature.
* The rank is used to measure how commonly the token is found in the
* dataset. The most common token would have a rank of 1, with the second-most
* common value having a rank of 2, and so on.
*
*
* .tensorflow.metadata.v0.RankHistogram rank_histogram = 7;
* @return The rankHistogram.
*/
@java.lang.Override
public org.tensorflow.metadata.v0.RankHistogram getRankHistogram() {
return rankHistogram_ == null ? org.tensorflow.metadata.v0.RankHistogram.getDefaultInstance() : rankHistogram_;
}
/**
*
* The rank histogram for the tokens of the feature.
* The rank is used to measure how commonly the token is found in the
* dataset. The most common token would have a rank of 1, with the second-most
* common value having a rank of 2, and so on.
*
*
* .tensorflow.metadata.v0.RankHistogram rank_histogram = 7;
*/
@java.lang.Override
public org.tensorflow.metadata.v0.RankHistogramOrBuilder getRankHistogramOrBuilder() {
return rankHistogram_ == null ? org.tensorflow.metadata.v0.RankHistogram.getDefaultInstance() : rankHistogram_;
}
public static final int WEIGHTED_NL_STATISTICS_FIELD_NUMBER = 8;
private org.tensorflow.metadata.v0.WeightedNaturalLanguageStatistics weightedNlStatistics_;
/**
* .tensorflow.metadata.v0.WeightedNaturalLanguageStatistics weighted_nl_statistics = 8;
* @return Whether the weightedNlStatistics field is set.
*/
@java.lang.Override
public boolean hasWeightedNlStatistics() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* .tensorflow.metadata.v0.WeightedNaturalLanguageStatistics weighted_nl_statistics = 8;
* @return The weightedNlStatistics.
*/
@java.lang.Override
public org.tensorflow.metadata.v0.WeightedNaturalLanguageStatistics getWeightedNlStatistics() {
return weightedNlStatistics_ == null ? org.tensorflow.metadata.v0.WeightedNaturalLanguageStatistics.getDefaultInstance() : weightedNlStatistics_;
}
/**
* .tensorflow.metadata.v0.WeightedNaturalLanguageStatistics weighted_nl_statistics = 8;
*/
@java.lang.Override
public org.tensorflow.metadata.v0.WeightedNaturalLanguageStatisticsOrBuilder getWeightedNlStatisticsOrBuilder() {
return weightedNlStatistics_ == null ? org.tensorflow.metadata.v0.WeightedNaturalLanguageStatistics.getDefaultInstance() : weightedNlStatistics_;
}
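// Illustrative sketch (editor's note, not generated code): the scalar and
// message-typed statistics above are read with the generated getters, checking
// has...() before touching optional sub-messages:
//
//   double coverage = nlStats.getFeatureCoverage();
//   double avgLen = nlStats.getAvgTokenLength();
//   if (nlStats.hasTokenLengthHistogram()) {
//     org.tensorflow.metadata.v0.Histogram h = nlStats.getTokenLengthHistogram();
//     // ... inspect histogram buckets ...
//   }
//
// (nlStats is an assumed, previously obtained NaturalLanguageStatistics instance.)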
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (java.lang.Double.doubleToRawLongBits(featureCoverage_) != 0) {
output.writeDouble(1, featureCoverage_);
}
if (java.lang.Double.doubleToRawLongBits(avgTokenLength_) != 0) {
output.writeDouble(2, avgTokenLength_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(3, getTokenLengthHistogram());
}
if (locationMisses_ != 0L) {
output.writeInt64(4, locationMisses_);
}
for (int i = 0; i < reportedSequences_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 5, reportedSequences_.getRaw(i));
}
for (int i = 0; i < tokenStatistics_.size(); i++) {
output.writeMessage(6, tokenStatistics_.get(i));
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeMessage(7, getRankHistogram());
}
if (((bitField0_ & 0x00000008) != 0)) {
output.writeMessage(8, getWeightedNlStatistics());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(9, getSequenceLengthHistogram());
}
if (minSequenceLength_ != 0L) {
output.writeInt64(10, minSequenceLength_);
}
if (maxSequenceLength_ != 0L) {
output.writeInt64(11, maxSequenceLength_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (java.lang.Double.doubleToRawLongBits(featureCoverage_) != 0) {
size += com.google.protobuf.CodedOutputStream
.computeDoubleSize(1, featureCoverage_);
}
if (java.lang.Double.doubleToRawLongBits(avgTokenLength_) != 0) {
size += com.google.protobuf.CodedOutputStream
.computeDoubleSize(2, avgTokenLength_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, getTokenLengthHistogram());
}
if (locationMisses_ != 0L) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(4, locationMisses_);
}
{
int dataSize = 0;
for (int i = 0; i < reportedSequences_.size(); i++) {
dataSize += computeStringSizeNoTag(reportedSequences_.getRaw(i));
}
size += dataSize;
size += 1 * getReportedSequencesList().size();
}
for (int i = 0; i < tokenStatistics_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(6, tokenStatistics_.get(i));
}
if (((bitField0_ & 0x00000004) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(7, getRankHistogram());
}
if (((bitField0_ & 0x00000008) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(8, getWeightedNlStatistics());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(9, getSequenceLengthHistogram());
}
if (minSequenceLength_ != 0L) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(10, minSequenceLength_);
}
if (maxSequenceLength_ != 0L) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(11, maxSequenceLength_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.tensorflow.metadata.v0.NaturalLanguageStatistics)) {
return super.equals(obj);
}
org.tensorflow.metadata.v0.NaturalLanguageStatistics other = (org.tensorflow.metadata.v0.NaturalLanguageStatistics) obj;
if (java.lang.Double.doubleToLongBits(getFeatureCoverage())
!= java.lang.Double.doubleToLongBits(
other.getFeatureCoverage())) return false;
if (java.lang.Double.doubleToLongBits(getAvgTokenLength())
!= java.lang.Double.doubleToLongBits(
other.getAvgTokenLength())) return false;
if (hasTokenLengthHistogram() != other.hasTokenLengthHistogram()) return false;
if (hasTokenLengthHistogram()) {
if (!getTokenLengthHistogram()
.equals(other.getTokenLengthHistogram())) return false;
}
if (getMinSequenceLength()
!= other.getMinSequenceLength()) return false;
if (getMaxSequenceLength()
!= other.getMaxSequenceLength()) return false;
if (hasSequenceLengthHistogram() != other.hasSequenceLengthHistogram()) return false;
if (hasSequenceLengthHistogram()) {
if (!getSequenceLengthHistogram()
.equals(other.getSequenceLengthHistogram())) return false;
}
if (getLocationMisses()
!= other.getLocationMisses()) return false;
if (!getReportedSequencesList()
.equals(other.getReportedSequencesList())) return false;
if (!getTokenStatisticsList()
.equals(other.getTokenStatisticsList())) return false;
if (hasRankHistogram() != other.hasRankHistogram()) return false;
if (hasRankHistogram()) {
if (!getRankHistogram()
.equals(other.getRankHistogram())) return false;
}
if (hasWeightedNlStatistics() != other.hasWeightedNlStatistics()) return false;
if (hasWeightedNlStatistics()) {
if (!getWeightedNlStatistics()
.equals(other.getWeightedNlStatistics())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + FEATURE_COVERAGE_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
java.lang.Double.doubleToLongBits(getFeatureCoverage()));
hash = (37 * hash) + AVG_TOKEN_LENGTH_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
java.lang.Double.doubleToLongBits(getAvgTokenLength()));
if (hasTokenLengthHistogram()) {
hash = (37 * hash) + TOKEN_LENGTH_HISTOGRAM_FIELD_NUMBER;
hash = (53 * hash) + getTokenLengthHistogram().hashCode();
}
hash = (37 * hash) + MIN_SEQUENCE_LENGTH_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
getMinSequenceLength());
hash = (37 * hash) + MAX_SEQUENCE_LENGTH_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
getMaxSequenceLength());
if (hasSequenceLengthHistogram()) {
hash = (37 * hash) + SEQUENCE_LENGTH_HISTOGRAM_FIELD_NUMBER;
hash = (53 * hash) + getSequenceLengthHistogram().hashCode();
}
hash = (37 * hash) + LOCATION_MISSES_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
getLocationMisses());
if (getReportedSequencesCount() > 0) {
hash = (37 * hash) + REPORTED_SEQUENCES_FIELD_NUMBER;
hash = (53 * hash) + getReportedSequencesList().hashCode();
}
if (getTokenStatisticsCount() > 0) {
hash = (37 * hash) + TOKEN_STATISTICS_FIELD_NUMBER;
hash = (53 * hash) + getTokenStatisticsList().hashCode();
}
if (hasRankHistogram()) {
hash = (37 * hash) + RANK_HISTOGRAM_FIELD_NUMBER;
hash = (53 * hash) + getRankHistogram().hashCode();
}
if (hasWeightedNlStatistics()) {
hash = (37 * hash) + WEIGHTED_NL_STATISTICS_FIELD_NUMBER;
hash = (53 * hash) + getWeightedNlStatistics().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
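// Usage sketch (illustrative comment, not generated code): round-tripping a message
// through bytes with the parseFrom overloads above. The field values are arbitrary
// placeholders.
//
//   org.tensorflow.metadata.v0.NaturalLanguageStatistics original =
//       org.tensorflow.metadata.v0.NaturalLanguageStatistics.newBuilder()
//           .setFeatureCoverage(0.92)
//           .setAvgTokenLength(4.3)
//           .build();
//   byte[] bytes = original.toByteArray();
//   org.tensorflow.metadata.v0.NaturalLanguageStatistics parsed =
//       org.tensorflow.metadata.v0.NaturalLanguageStatistics.parseFrom(bytes);
//   assert parsed.getFeatureCoverage() == 0.92;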
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.tensorflow.metadata.v0.NaturalLanguageStatistics prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
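// Usage sketch (illustrative comment, not generated code): typical construction via
// the Builder defined below. Field values are arbitrary placeholders.
//
//   org.tensorflow.metadata.v0.NaturalLanguageStatistics stats =
//       org.tensorflow.metadata.v0.NaturalLanguageStatistics.newBuilder()
//           .setFeatureCoverage(0.87)
//           .setMinSequenceLength(3L)
//           .setMaxSequenceLength(128L)
//           .addReportedSequences("example low-coverage sequence")
//           .build();
//   // toBuilder() above returns an editable copy seeded with these values.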
/**
*
* Statistics for a feature containing a NL domain.
*
*
* Protobuf type {@code tensorflow.metadata.v0.NaturalLanguageStatistics}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:tensorflow.metadata.v0.NaturalLanguageStatistics)
org.tensorflow.metadata.v0.NaturalLanguageStatisticsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.tensorflow.metadata.v0.Statistics.internal_static_tensorflow_metadata_v0_NaturalLanguageStatistics_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.tensorflow.metadata.v0.Statistics.internal_static_tensorflow_metadata_v0_NaturalLanguageStatistics_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.tensorflow.metadata.v0.NaturalLanguageStatistics.class, org.tensorflow.metadata.v0.NaturalLanguageStatistics.Builder.class);
}
// Construct using org.tensorflow.metadata.v0.NaturalLanguageStatistics.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getTokenLengthHistogramFieldBuilder();
getSequenceLengthHistogramFieldBuilder();
getTokenStatisticsFieldBuilder();
getRankHistogramFieldBuilder();
getWeightedNlStatisticsFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
featureCoverage_ = 0D;
avgTokenLength_ = 0D;
tokenLengthHistogram_ = null;
if (tokenLengthHistogramBuilder_ != null) {
tokenLengthHistogramBuilder_.dispose();
tokenLengthHistogramBuilder_ = null;
}
minSequenceLength_ = 0L;
maxSequenceLength_ = 0L;
sequenceLengthHistogram_ = null;
if (sequenceLengthHistogramBuilder_ != null) {
sequenceLengthHistogramBuilder_.dispose();
sequenceLengthHistogramBuilder_ = null;
}
locationMisses_ = 0L;
reportedSequences_ =
com.google.protobuf.LazyStringArrayList.emptyList();
if (tokenStatisticsBuilder_ == null) {
tokenStatistics_ = java.util.Collections.emptyList();
} else {
tokenStatistics_ = null;
tokenStatisticsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000100);
rankHistogram_ = null;
if (rankHistogramBuilder_ != null) {
rankHistogramBuilder_.dispose();
rankHistogramBuilder_ = null;
}
weightedNlStatistics_ = null;
if (weightedNlStatisticsBuilder_ != null) {
weightedNlStatisticsBuilder_.dispose();
weightedNlStatisticsBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.tensorflow.metadata.v0.Statistics.internal_static_tensorflow_metadata_v0_NaturalLanguageStatistics_descriptor;
}
@java.lang.Override
public org.tensorflow.metadata.v0.NaturalLanguageStatistics getDefaultInstanceForType() {
return org.tensorflow.metadata.v0.NaturalLanguageStatistics.getDefaultInstance();
}
@java.lang.Override
public org.tensorflow.metadata.v0.NaturalLanguageStatistics build() {
org.tensorflow.metadata.v0.NaturalLanguageStatistics result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.tensorflow.metadata.v0.NaturalLanguageStatistics buildPartial() {
org.tensorflow.metadata.v0.NaturalLanguageStatistics result = new org.tensorflow.metadata.v0.NaturalLanguageStatistics(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(org.tensorflow.metadata.v0.NaturalLanguageStatistics result) {
if (tokenStatisticsBuilder_ == null) {
if (((bitField0_ & 0x00000100) != 0)) {
tokenStatistics_ = java.util.Collections.unmodifiableList(tokenStatistics_);
bitField0_ = (bitField0_ & ~0x00000100);
}
result.tokenStatistics_ = tokenStatistics_;
} else {
result.tokenStatistics_ = tokenStatisticsBuilder_.build();
}
}
private void buildPartial0(org.tensorflow.metadata.v0.NaturalLanguageStatistics result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.featureCoverage_ = featureCoverage_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.avgTokenLength_ = avgTokenLength_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.tokenLengthHistogram_ = tokenLengthHistogramBuilder_ == null
? tokenLengthHistogram_
: tokenLengthHistogramBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.minSequenceLength_ = minSequenceLength_;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.maxSequenceLength_ = maxSequenceLength_;
}
if (((from_bitField0_ & 0x00000020) != 0)) {
result.sequenceLengthHistogram_ = sequenceLengthHistogramBuilder_ == null
? sequenceLengthHistogram_
: sequenceLengthHistogramBuilder_.build();
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000040) != 0)) {
result.locationMisses_ = locationMisses_;
}
if (((from_bitField0_ & 0x00000080) != 0)) {
reportedSequences_.makeImmutable();
result.reportedSequences_ = reportedSequences_;
}
if (((from_bitField0_ & 0x00000200) != 0)) {
result.rankHistogram_ = rankHistogramBuilder_ == null
? rankHistogram_
: rankHistogramBuilder_.build();
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000400) != 0)) {
result.weightedNlStatistics_ = weightedNlStatisticsBuilder_ == null
? weightedNlStatistics_
: weightedNlStatisticsBuilder_.build();
to_bitField0_ |= 0x00000008;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.tensorflow.metadata.v0.NaturalLanguageStatistics) {
return mergeFrom((org.tensorflow.metadata.v0.NaturalLanguageStatistics)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.tensorflow.metadata.v0.NaturalLanguageStatistics other) {
if (other == org.tensorflow.metadata.v0.NaturalLanguageStatistics.getDefaultInstance()) return this;
if (other.getFeatureCoverage() != 0D) {
setFeatureCoverage(other.getFeatureCoverage());
}
if (other.getAvgTokenLength() != 0D) {
setAvgTokenLength(other.getAvgTokenLength());
}
if (other.hasTokenLengthHistogram()) {
mergeTokenLengthHistogram(other.getTokenLengthHistogram());
}
if (other.getMinSequenceLength() != 0L) {
setMinSequenceLength(other.getMinSequenceLength());
}
if (other.getMaxSequenceLength() != 0L) {
setMaxSequenceLength(other.getMaxSequenceLength());
}
if (other.hasSequenceLengthHistogram()) {
mergeSequenceLengthHistogram(other.getSequenceLengthHistogram());
}
if (other.getLocationMisses() != 0L) {
setLocationMisses(other.getLocationMisses());
}
if (!other.reportedSequences_.isEmpty()) {
if (reportedSequences_.isEmpty()) {
reportedSequences_ = other.reportedSequences_;
bitField0_ |= 0x00000080;
} else {
ensureReportedSequencesIsMutable();
reportedSequences_.addAll(other.reportedSequences_);
}
onChanged();
}
if (tokenStatisticsBuilder_ == null) {
if (!other.tokenStatistics_.isEmpty()) {
if (tokenStatistics_.isEmpty()) {
tokenStatistics_ = other.tokenStatistics_;
bitField0_ = (bitField0_ & ~0x00000100);
} else {
ensureTokenStatisticsIsMutable();
tokenStatistics_.addAll(other.tokenStatistics_);
}
onChanged();
}
} else {
if (!other.tokenStatistics_.isEmpty()) {
if (tokenStatisticsBuilder_.isEmpty()) {
tokenStatisticsBuilder_.dispose();
tokenStatisticsBuilder_ = null;
tokenStatistics_ = other.tokenStatistics_;
bitField0_ = (bitField0_ & ~0x00000100);
tokenStatisticsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getTokenStatisticsFieldBuilder() : null;
} else {
tokenStatisticsBuilder_.addAllMessages(other.tokenStatistics_);
}
}
}
if (other.hasRankHistogram()) {
mergeRankHistogram(other.getRankHistogram());
}
if (other.hasWeightedNlStatistics()) {
mergeWeightedNlStatistics(other.getWeightedNlStatistics());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 9: {
featureCoverage_ = input.readDouble();
bitField0_ |= 0x00000001;
break;
} // case 9
case 17: {
avgTokenLength_ = input.readDouble();
bitField0_ |= 0x00000002;
break;
} // case 17
case 26: {
input.readMessage(
getTokenLengthHistogramFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
case 32: {
locationMisses_ = input.readInt64();
bitField0_ |= 0x00000040;
break;
} // case 32
case 42: {
java.lang.String s = input.readStringRequireUtf8();
ensureReportedSequencesIsMutable();
reportedSequences_.add(s);
break;
} // case 42
case 50: {
org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics m =
input.readMessage(
org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics.parser(),
extensionRegistry);
if (tokenStatisticsBuilder_ == null) {
ensureTokenStatisticsIsMutable();
tokenStatistics_.add(m);
} else {
tokenStatisticsBuilder_.addMessage(m);
}
break;
} // case 50
case 58: {
input.readMessage(
getRankHistogramFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000200;
break;
} // case 58
case 66: {
input.readMessage(
getWeightedNlStatisticsFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000400;
break;
} // case 66
case 74: {
input.readMessage(
getSequenceLengthHistogramFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000020;
break;
} // case 74
case 80: {
minSequenceLength_ = input.readInt64();
bitField0_ |= 0x00000008;
break;
} // case 80
case 88: {
maxSequenceLength_ = input.readInt64();
bitField0_ |= 0x00000010;
break;
} // case 88
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private double featureCoverage_ ;
/**
*
* Fraction of feature input tokens considered in-vocab.
*
*
* double feature_coverage = 1;
* @return The featureCoverage.
*/
@java.lang.Override
public double getFeatureCoverage() {
return featureCoverage_;
}
/**
*
* Fraction of feature input tokens considered in-vocab.
*
*
* double feature_coverage = 1;
* @param value The featureCoverage to set.
* @return This builder for chaining.
*/
public Builder setFeatureCoverage(double value) {
featureCoverage_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
* Fraction of feature input tokens considered in-vocab.
*
*
* double feature_coverage = 1;
* @return This builder for chaining.
*/
public Builder clearFeatureCoverage() {
bitField0_ = (bitField0_ & ~0x00000001);
featureCoverage_ = 0D;
onChanged();
return this;
}
private double avgTokenLength_ ;
/**
*
* Average token length of tokens used by the feature.
*
*
* double avg_token_length = 2;
* @return The avgTokenLength.
*/
@java.lang.Override
public double getAvgTokenLength() {
return avgTokenLength_;
}
/**
*
* Average token length of tokens used by the feature.
*
*
* double avg_token_length = 2;
* @param value The avgTokenLength to set.
* @return This builder for chaining.
*/
public Builder setAvgTokenLength(double value) {
avgTokenLength_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
* Average token length of tokens used by the feature.
*
*
* double avg_token_length = 2;
* @return This builder for chaining.
*/
public Builder clearAvgTokenLength() {
bitField0_ = (bitField0_ & ~0x00000002);
avgTokenLength_ = 0D;
onChanged();
return this;
}
private org.tensorflow.metadata.v0.Histogram tokenLengthHistogram_;
private com.google.protobuf.SingleFieldBuilderV3<
org.tensorflow.metadata.v0.Histogram, org.tensorflow.metadata.v0.Histogram.Builder, org.tensorflow.metadata.v0.HistogramOrBuilder> tokenLengthHistogramBuilder_;
/**
*
* Histogram containing the distribution of token lengths.
*
*
* .tensorflow.metadata.v0.Histogram token_length_histogram = 3;
* @return Whether the tokenLengthHistogram field is set.
*/
public boolean hasTokenLengthHistogram() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
* Histogram containing the distribution of token lengths.
*
*
* .tensorflow.metadata.v0.Histogram token_length_histogram = 3;
* @return The tokenLengthHistogram.
*/
public org.tensorflow.metadata.v0.Histogram getTokenLengthHistogram() {
if (tokenLengthHistogramBuilder_ == null) {
return tokenLengthHistogram_ == null ? org.tensorflow.metadata.v0.Histogram.getDefaultInstance() : tokenLengthHistogram_;
} else {
return tokenLengthHistogramBuilder_.getMessage();
}
}
/**
*
* Histogram containing the distribution of token lengths.
*
*
* .tensorflow.metadata.v0.Histogram token_length_histogram = 3;
*/
public Builder setTokenLengthHistogram(org.tensorflow.metadata.v0.Histogram value) {
if (tokenLengthHistogramBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
tokenLengthHistogram_ = value;
} else {
tokenLengthHistogramBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
* Histogram containing the distribution of token lengths.
*
*
* .tensorflow.metadata.v0.Histogram token_length_histogram = 3;
*/
public Builder setTokenLengthHistogram(
org.tensorflow.metadata.v0.Histogram.Builder builderForValue) {
if (tokenLengthHistogramBuilder_ == null) {
tokenLengthHistogram_ = builderForValue.build();
} else {
tokenLengthHistogramBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
* Histogram containing the distribution of token lengths.
*
*
* .tensorflow.metadata.v0.Histogram token_length_histogram = 3;
*/
public Builder mergeTokenLengthHistogram(org.tensorflow.metadata.v0.Histogram value) {
if (tokenLengthHistogramBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0) &&
tokenLengthHistogram_ != null &&
tokenLengthHistogram_ != org.tensorflow.metadata.v0.Histogram.getDefaultInstance()) {
getTokenLengthHistogramBuilder().mergeFrom(value);
} else {
tokenLengthHistogram_ = value;
}
} else {
tokenLengthHistogramBuilder_.mergeFrom(value);
}
if (tokenLengthHistogram_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
* Histogram containing the distribution of token lengths.
*
*
* .tensorflow.metadata.v0.Histogram token_length_histogram = 3;
*/
public Builder clearTokenLengthHistogram() {
bitField0_ = (bitField0_ & ~0x00000004);
tokenLengthHistogram_ = null;
if (tokenLengthHistogramBuilder_ != null) {
tokenLengthHistogramBuilder_.dispose();
tokenLengthHistogramBuilder_ = null;
}
onChanged();
return this;
}
/**
*
* Histogram containing the distribution of token lengths.
*
*
* .tensorflow.metadata.v0.Histogram token_length_histogram = 3;
*/
public org.tensorflow.metadata.v0.Histogram.Builder getTokenLengthHistogramBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getTokenLengthHistogramFieldBuilder().getBuilder();
}
/**
*
* Histogram containing the distribution of token lengths.
*
*
* .tensorflow.metadata.v0.Histogram token_length_histogram = 3;
*/
public org.tensorflow.metadata.v0.HistogramOrBuilder getTokenLengthHistogramOrBuilder() {
if (tokenLengthHistogramBuilder_ != null) {
return tokenLengthHistogramBuilder_.getMessageOrBuilder();
} else {
return tokenLengthHistogram_ == null ?
org.tensorflow.metadata.v0.Histogram.getDefaultInstance() : tokenLengthHistogram_;
}
}
/**
*
* Histogram containing the distribution of token lengths.
*
*
* .tensorflow.metadata.v0.Histogram token_length_histogram = 3;
*/
private com.google.protobuf.SingleFieldBuilderV3<
org.tensorflow.metadata.v0.Histogram, org.tensorflow.metadata.v0.Histogram.Builder, org.tensorflow.metadata.v0.HistogramOrBuilder>
getTokenLengthHistogramFieldBuilder() {
if (tokenLengthHistogramBuilder_ == null) {
tokenLengthHistogramBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
org.tensorflow.metadata.v0.Histogram, org.tensorflow.metadata.v0.Histogram.Builder, org.tensorflow.metadata.v0.HistogramOrBuilder>(
getTokenLengthHistogram(),
getParentForChildren(),
isClean());
tokenLengthHistogram_ = null;
}
return tokenLengthHistogramBuilder_;
}
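// Usage sketch (illustrative comment, not generated code): the token_length_histogram
// message field can be set from a prebuilt Histogram or edited in place through the
// nested field builder; the empty Histogram here is only a placeholder.
//
//   org.tensorflow.metadata.v0.NaturalLanguageStatistics.Builder b =
//       org.tensorflow.metadata.v0.NaturalLanguageStatistics.newBuilder();
//   b.setTokenLengthHistogram(org.tensorflow.metadata.v0.Histogram.newBuilder().build());
//   b.getTokenLengthHistogramBuilder();  // also marks the field as set (hasTokenLengthHistogram() == true)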
private long minSequenceLength_ ;
/**
*
* Min / max sequence lengths.
*
*
* int64 min_sequence_length = 10;
* @return The minSequenceLength.
*/
@java.lang.Override
public long getMinSequenceLength() {
return minSequenceLength_;
}
/**
*
* Min / max sequence lengths.
*
*
* int64 min_sequence_length = 10;
* @param value The minSequenceLength to set.
* @return This builder for chaining.
*/
public Builder setMinSequenceLength(long value) {
minSequenceLength_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
* Min / max sequence lengths.
*
*
* int64 min_sequence_length = 10;
* @return This builder for chaining.
*/
public Builder clearMinSequenceLength() {
bitField0_ = (bitField0_ & ~0x00000008);
minSequenceLength_ = 0L;
onChanged();
return this;
}
private long maxSequenceLength_ ;
/**
* int64 max_sequence_length = 11;
* @return The maxSequenceLength.
*/
@java.lang.Override
public long getMaxSequenceLength() {
return maxSequenceLength_;
}
/**
* int64 max_sequence_length = 11;
* @param value The maxSequenceLength to set.
* @return This builder for chaining.
*/
public Builder setMaxSequenceLength(long value) {
maxSequenceLength_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
* int64 max_sequence_length = 11;
* @return This builder for chaining.
*/
public Builder clearMaxSequenceLength() {
bitField0_ = (bitField0_ & ~0x00000010);
maxSequenceLength_ = 0L;
onChanged();
return this;
}
private org.tensorflow.metadata.v0.Histogram sequenceLengthHistogram_;
private com.google.protobuf.SingleFieldBuilderV3<
org.tensorflow.metadata.v0.Histogram, org.tensorflow.metadata.v0.Histogram.Builder, org.tensorflow.metadata.v0.HistogramOrBuilder> sequenceLengthHistogramBuilder_;
/**
*
* Histogram containing the distribution of sequence lengths.
*
*
* .tensorflow.metadata.v0.Histogram sequence_length_histogram = 9;
* @return Whether the sequenceLengthHistogram field is set.
*/
public boolean hasSequenceLengthHistogram() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
*
* Histogram containing the distribution of sequence lengths.
*
*
* .tensorflow.metadata.v0.Histogram sequence_length_histogram = 9;
* @return The sequenceLengthHistogram.
*/
public org.tensorflow.metadata.v0.Histogram getSequenceLengthHistogram() {
if (sequenceLengthHistogramBuilder_ == null) {
return sequenceLengthHistogram_ == null ? org.tensorflow.metadata.v0.Histogram.getDefaultInstance() : sequenceLengthHistogram_;
} else {
return sequenceLengthHistogramBuilder_.getMessage();
}
}
/**
*
* Histogram containing the distribution of sequence lengths.
*
*
* .tensorflow.metadata.v0.Histogram sequence_length_histogram = 9;
*/
public Builder setSequenceLengthHistogram(org.tensorflow.metadata.v0.Histogram value) {
if (sequenceLengthHistogramBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
sequenceLengthHistogram_ = value;
} else {
sequenceLengthHistogramBuilder_.setMessage(value);
}
bitField0_ |= 0x00000020;
onChanged();
return this;
}
/**
*
* Histogram containing the distribution of sequence lengths.
*
*
* .tensorflow.metadata.v0.Histogram sequence_length_histogram = 9;
*/
public Builder setSequenceLengthHistogram(
org.tensorflow.metadata.v0.Histogram.Builder builderForValue) {
if (sequenceLengthHistogramBuilder_ == null) {
sequenceLengthHistogram_ = builderForValue.build();
} else {
sequenceLengthHistogramBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000020;
onChanged();
return this;
}
/**
*
* Histogram containing the distribution of sequence lengths.
*
*
* .tensorflow.metadata.v0.Histogram sequence_length_histogram = 9;
*/
public Builder mergeSequenceLengthHistogram(org.tensorflow.metadata.v0.Histogram value) {
if (sequenceLengthHistogramBuilder_ == null) {
if (((bitField0_ & 0x00000020) != 0) &&
sequenceLengthHistogram_ != null &&
sequenceLengthHistogram_ != org.tensorflow.metadata.v0.Histogram.getDefaultInstance()) {
getSequenceLengthHistogramBuilder().mergeFrom(value);
} else {
sequenceLengthHistogram_ = value;
}
} else {
sequenceLengthHistogramBuilder_.mergeFrom(value);
}
if (sequenceLengthHistogram_ != null) {
bitField0_ |= 0x00000020;
onChanged();
}
return this;
}
/**
*
* Histogram containing the distribution of sequence lengths.
*
*
* .tensorflow.metadata.v0.Histogram sequence_length_histogram = 9;
*/
public Builder clearSequenceLengthHistogram() {
bitField0_ = (bitField0_ & ~0x00000020);
sequenceLengthHistogram_ = null;
if (sequenceLengthHistogramBuilder_ != null) {
sequenceLengthHistogramBuilder_.dispose();
sequenceLengthHistogramBuilder_ = null;
}
onChanged();
return this;
}
/**
*
* Histogram containing the distribution of sequence lengths.
*
*
* .tensorflow.metadata.v0.Histogram sequence_length_histogram = 9;
*/
public org.tensorflow.metadata.v0.Histogram.Builder getSequenceLengthHistogramBuilder() {
bitField0_ |= 0x00000020;
onChanged();
return getSequenceLengthHistogramFieldBuilder().getBuilder();
}
/**
*
* Histogram containing the distribution of sequence lengths.
*
*
* .tensorflow.metadata.v0.Histogram sequence_length_histogram = 9;
*/
public org.tensorflow.metadata.v0.HistogramOrBuilder getSequenceLengthHistogramOrBuilder() {
if (sequenceLengthHistogramBuilder_ != null) {
return sequenceLengthHistogramBuilder_.getMessageOrBuilder();
} else {
return sequenceLengthHistogram_ == null ?
org.tensorflow.metadata.v0.Histogram.getDefaultInstance() : sequenceLengthHistogram_;
}
}
/**
*
* Histogram containing the distribution of sequence lengths.
*
*
* .tensorflow.metadata.v0.Histogram sequence_length_histogram = 9;
*/
private com.google.protobuf.SingleFieldBuilderV3<
org.tensorflow.metadata.v0.Histogram, org.tensorflow.metadata.v0.Histogram.Builder, org.tensorflow.metadata.v0.HistogramOrBuilder>
getSequenceLengthHistogramFieldBuilder() {
if (sequenceLengthHistogramBuilder_ == null) {
sequenceLengthHistogramBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
org.tensorflow.metadata.v0.Histogram, org.tensorflow.metadata.v0.Histogram.Builder, org.tensorflow.metadata.v0.HistogramOrBuilder>(
getSequenceLengthHistogram(),
getParentForChildren(),
isClean());
sequenceLengthHistogram_ = null;
}
return sequenceLengthHistogramBuilder_;
}
private long locationMisses_ ;
/**
*
* Number of sequences which do not match the location constraint.
*
*
* int64 location_misses = 4;
* @return The locationMisses.
*/
@java.lang.Override
public long getLocationMisses() {
return locationMisses_;
}
/**
*
* Number of sequences which do not match the location constraint.
*
*
* int64 location_misses = 4;
* @param value The locationMisses to set.
* @return This builder for chaining.
*/
public Builder setLocationMisses(long value) {
locationMisses_ = value;
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
*
* Number of sequences which do not match the location constraint.
*
*
* int64 location_misses = 4;
* @return This builder for chaining.
*/
public Builder clearLocationMisses() {
bitField0_ = (bitField0_ & ~0x00000040);
locationMisses_ = 0L;
onChanged();
return this;
}
private com.google.protobuf.LazyStringArrayList reportedSequences_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensureReportedSequencesIsMutable() {
if (!reportedSequences_.isModifiable()) {
reportedSequences_ = new com.google.protobuf.LazyStringArrayList(reportedSequences_);
}
bitField0_ |= 0x00000080;
}
/**
*
* Reported sequences that are sampled from the input and have small
* avg_token_length, low feature coverage, or do not match the location
* regex.
*
*
* repeated string reported_sequences = 5;
* @return A list containing the reportedSequences.
*/
public com.google.protobuf.ProtocolStringList
getReportedSequencesList() {
reportedSequences_.makeImmutable();
return reportedSequences_;
}
/**
*
* Reported sequences that are sampled from the input and have small
* avg_token_length, low feature coverage, or do not match the location
* regex.
*
*
* repeated string reported_sequences = 5;
* @return The count of reportedSequences.
*/
public int getReportedSequencesCount() {
return reportedSequences_.size();
}
/**
*
* Reported sequences that are sampled from the input and have small
* avg_token_length, low feature coverage, or do not match the location
* regex.
*
*
* repeated string reported_sequences = 5;
* @param index The index of the element to return.
* @return The reportedSequences at the given index.
*/
public java.lang.String getReportedSequences(int index) {
return reportedSequences_.get(index);
}
/**
*
* Reported sequences that are sampled from the input and have small
* avg_token_length, low feature coverage, or do not match the location
* regex.
*
*
* repeated string reported_sequences = 5;
* @param index The index of the value to return.
* @return The bytes of the reportedSequences at the given index.
*/
public com.google.protobuf.ByteString
getReportedSequencesBytes(int index) {
return reportedSequences_.getByteString(index);
}
/**
*
* Reported sequences that are sampled from the input and have small
* avg_token_length, low feature coverage, or do not match the location
* regex.
*
*
* repeated string reported_sequences = 5;
* @param index The index to set the value at.
* @param value The reportedSequences to set.
* @return This builder for chaining.
*/
public Builder setReportedSequences(
int index, java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureReportedSequencesIsMutable();
reportedSequences_.set(index, value);
bitField0_ |= 0x00000080;
onChanged();
return this;
}
/**
*
* Reported sequences that are sampled from the input and have small
* avg_token_length, low feature coverage, or do not match the location
* regex.
*
*
* repeated string reported_sequences = 5;
* @param value The reportedSequences to add.
* @return This builder for chaining.
*/
public Builder addReportedSequences(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
ensureReportedSequencesIsMutable();
reportedSequences_.add(value);
bitField0_ |= 0x00000080;
onChanged();
return this;
}
/**
*
* Reported sequences that are sampled from the input and have small
* avg_token_length, low feature coverage, or do not match the location
* regex.
*
*
* repeated string reported_sequences = 5;
* @param values The reportedSequences to add.
* @return This builder for chaining.
*/
public Builder addAllReportedSequences(
java.lang.Iterable<java.lang.String> values) {
ensureReportedSequencesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, reportedSequences_);
bitField0_ |= 0x00000080;
onChanged();
return this;
}
/**
*
* Reported sequences that are sampled from the input and have small
* avg_token_length, low feature coverage, or do not match the location
* regex.
*
*
* repeated string reported_sequences = 5;
* @return This builder for chaining.
*/
public Builder clearReportedSequences() {
reportedSequences_ =
com.google.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000080);
onChanged();
return this;
}
/**
*
* Reported sequences that are sampled from the input and have small
* avg_token_length, low feature coverage, or do not match the location
* regex.
*
*
* repeated string reported_sequences = 5;
* @param value The bytes of the reportedSequences to add.
* @return This builder for chaining.
*/
public Builder addReportedSequencesBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
ensureReportedSequencesIsMutable();
reportedSequences_.add(value);
bitField0_ |= 0x00000080;
onChanged();
return this;
}
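// Usage sketch (illustrative comment, not generated code): reported_sequences is a
// repeated string field, so entries can be added one at a time or in bulk. The
// strings are arbitrary placeholders.
//
//   org.tensorflow.metadata.v0.NaturalLanguageStatistics stats =
//       org.tensorflow.metadata.v0.NaturalLanguageStatistics.newBuilder()
//           .addReportedSequences("short sequence")
//           .addAllReportedSequences(java.util.Arrays.asList("seq a", "seq b"))
//           .build();
//   int count = stats.getReportedSequencesCount();  // 3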
private java.util.List<org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics> tokenStatistics_ =
java.util.Collections.emptyList();
private void ensureTokenStatisticsIsMutable() {
if (!((bitField0_ & 0x00000100) != 0)) {
tokenStatistics_ = new java.util.ArrayList<org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics>(tokenStatistics_);
bitField0_ |= 0x00000100;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics, org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics.Builder, org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatisticsOrBuilder> tokenStatisticsBuilder_;
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
public java.util.List<org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics> getTokenStatisticsList() {
if (tokenStatisticsBuilder_ == null) {
return java.util.Collections.unmodifiableList(tokenStatistics_);
} else {
return tokenStatisticsBuilder_.getMessageList();
}
}
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
public int getTokenStatisticsCount() {
if (tokenStatisticsBuilder_ == null) {
return tokenStatistics_.size();
} else {
return tokenStatisticsBuilder_.getCount();
}
}
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
public org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics getTokenStatistics(int index) {
if (tokenStatisticsBuilder_ == null) {
return tokenStatistics_.get(index);
} else {
return tokenStatisticsBuilder_.getMessage(index);
}
}
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
public Builder setTokenStatistics(
int index, org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics value) {
if (tokenStatisticsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTokenStatisticsIsMutable();
tokenStatistics_.set(index, value);
onChanged();
} else {
tokenStatisticsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
public Builder setTokenStatistics(
int index, org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics.Builder builderForValue) {
if (tokenStatisticsBuilder_ == null) {
ensureTokenStatisticsIsMutable();
tokenStatistics_.set(index, builderForValue.build());
onChanged();
} else {
tokenStatisticsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
public Builder addTokenStatistics(org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics value) {
if (tokenStatisticsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTokenStatisticsIsMutable();
tokenStatistics_.add(value);
onChanged();
} else {
tokenStatisticsBuilder_.addMessage(value);
}
return this;
}
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
public Builder addTokenStatistics(
int index, org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics value) {
if (tokenStatisticsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTokenStatisticsIsMutable();
tokenStatistics_.add(index, value);
onChanged();
} else {
tokenStatisticsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
public Builder addTokenStatistics(
org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics.Builder builderForValue) {
if (tokenStatisticsBuilder_ == null) {
ensureTokenStatisticsIsMutable();
tokenStatistics_.add(builderForValue.build());
onChanged();
} else {
tokenStatisticsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
public Builder addTokenStatistics(
int index, org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics.Builder builderForValue) {
if (tokenStatisticsBuilder_ == null) {
ensureTokenStatisticsIsMutable();
tokenStatistics_.add(index, builderForValue.build());
onChanged();
} else {
tokenStatisticsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
public Builder addAllTokenStatistics(
java.lang.Iterable<? extends org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics> values) {
if (tokenStatisticsBuilder_ == null) {
ensureTokenStatisticsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, tokenStatistics_);
onChanged();
} else {
tokenStatisticsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
public Builder clearTokenStatistics() {
if (tokenStatisticsBuilder_ == null) {
tokenStatistics_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000100);
onChanged();
} else {
tokenStatisticsBuilder_.clear();
}
return this;
}
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
public Builder removeTokenStatistics(int index) {
if (tokenStatisticsBuilder_ == null) {
ensureTokenStatisticsIsMutable();
tokenStatistics_.remove(index);
onChanged();
} else {
tokenStatisticsBuilder_.remove(index);
}
return this;
}
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
public org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics.Builder getTokenStatisticsBuilder(
int index) {
return getTokenStatisticsFieldBuilder().getBuilder(index);
}
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
public org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatisticsOrBuilder getTokenStatisticsOrBuilder(
int index) {
if (tokenStatisticsBuilder_ == null) {
return tokenStatistics_.get(index); } else {
return tokenStatisticsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
public java.util.List<? extends org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatisticsOrBuilder>
getTokenStatisticsOrBuilderList() {
if (tokenStatisticsBuilder_ != null) {
return tokenStatisticsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(tokenStatistics_);
}
}
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
public org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics.Builder addTokenStatisticsBuilder() {
return getTokenStatisticsFieldBuilder().addBuilder(
org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics.getDefaultInstance());
}
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
public org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics.Builder addTokenStatisticsBuilder(
int index) {
return getTokenStatisticsFieldBuilder().addBuilder(
index, org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics.getDefaultInstance());
}
/**
*
* Statistics for specified tokens. TokenStatistics are only reported for
* tokens specified in SequenceValueConstraints in the schema.
*
*
* repeated .tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics token_statistics = 6;
*/
public java.util.List<org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics.Builder>
getTokenStatisticsBuilderList() {
return getTokenStatisticsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics, org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics.Builder, org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatisticsOrBuilder>
getTokenStatisticsFieldBuilder() {
if (tokenStatisticsBuilder_ == null) {
tokenStatisticsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics, org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics.Builder, org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatisticsOrBuilder>(
tokenStatistics_,
((bitField0_ & 0x00000100) != 0),
getParentForChildren(),
isClean());
tokenStatistics_ = null;
}
return tokenStatisticsBuilder_;
}
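// Usage sketch (illustrative comment, not generated code): the repeated
// token_statistics field accepts prebuilt TokenStatistics messages or can be grown
// in place with addTokenStatisticsBuilder(). Token values are placeholders; note
// that string_token and int_token are set on different entries here.
//
//   org.tensorflow.metadata.v0.NaturalLanguageStatistics.Builder b =
//       org.tensorflow.metadata.v0.NaturalLanguageStatistics.newBuilder();
//   b.addTokenStatistics(
//       org.tensorflow.metadata.v0.NaturalLanguageStatistics.TokenStatistics.newBuilder()
//           .setStringToken("[UNK]")
//           .setFrequency(42.0)
//           .build());
//   b.addTokenStatisticsBuilder().setIntToken(7L);  // edits the second list entry in place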
private org.tensorflow.metadata.v0.RankHistogram rankHistogram_;
private com.google.protobuf.SingleFieldBuilderV3<
org.tensorflow.metadata.v0.RankHistogram, org.tensorflow.metadata.v0.RankHistogram.Builder, org.tensorflow.metadata.v0.RankHistogramOrBuilder> rankHistogramBuilder_;
/**
*
* The rank histogram for the tokens of the feature.
* The rank is used to measure how commonly the token is found in the
* dataset. The most common token would have a rank of 1, with the second-most
* common value having a rank of 2, and so on.
*
*
* .tensorflow.metadata.v0.RankHistogram rank_histogram = 7;
* @return Whether the rankHistogram field is set.
*/
public boolean hasRankHistogram() {
return ((bitField0_ & 0x00000200) != 0);
}
/**
*
* The rank histogram for the tokens of the feature.
* The rank is used to measure how commonly the token is found in the
* dataset. The most common token would have a rank of 1, with the second-most
* common value having a rank of 2, and so on.
*
*
* .tensorflow.metadata.v0.RankHistogram rank_histogram = 7;
* @return The rankHistogram.
*/
public org.tensorflow.metadata.v0.RankHistogram getRankHistogram() {
if (rankHistogramBuilder_ == null) {
return rankHistogram_ == null ? org.tensorflow.metadata.v0.RankHistogram.getDefaultInstance() : rankHistogram_;
} else {
return rankHistogramBuilder_.getMessage();
}
}
/**
*
* The rank histogram for the tokens of the feature.
* The rank is used to measure how commonly the token is found in the
* dataset. The most common token would have a rank of 1, with the second-most
* common value having a rank of 2, and so on.
*
*
* .tensorflow.metadata.v0.RankHistogram rank_histogram = 7;
*/
public Builder setRankHistogram(org.tensorflow.metadata.v0.RankHistogram value) {
if (rankHistogramBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
rankHistogram_ = value;
} else {
rankHistogramBuilder_.setMessage(value);
}
bitField0_ |= 0x00000200;
onChanged();
return this;
}
/**
*
* The rank histogram for the tokens of the feature.
* The rank is used to measure how commonly the token is found in the
* dataset. The most common token would have a rank of 1, with the second-most
* common value having a rank of 2, and so on.
*
*
* .tensorflow.metadata.v0.RankHistogram rank_histogram = 7;
*/
public Builder setRankHistogram(
org.tensorflow.metadata.v0.RankHistogram.Builder builderForValue) {
if (rankHistogramBuilder_ == null) {
rankHistogram_ = builderForValue.build();
} else {
rankHistogramBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000200;
onChanged();
return this;
}
/**
*
* The rank histogram for the tokens of the feature.
* The rank is used to measure how commonly the token is found in the
* dataset. The most common token would have a rank of 1, with the second-most
* common value having a rank of 2, and so on.
*
*
* .tensorflow.metadata.v0.RankHistogram rank_histogram = 7;
*/
public Builder mergeRankHistogram(org.tensorflow.metadata.v0.RankHistogram value) {
if (rankHistogramBuilder_ == null) {
if (((bitField0_ & 0x00000200) != 0) &&
rankHistogram_ != null &&
rankHistogram_ != org.tensorflow.metadata.v0.RankHistogram.getDefaultInstance()) {
getRankHistogramBuilder().mergeFrom(value);
} else {
rankHistogram_ = value;
}
} else {
rankHistogramBuilder_.mergeFrom(value);
}
if (rankHistogram_ != null) {
bitField0_ |= 0x00000200;
onChanged();
}
return this;
}
/**
*
* The rank histogram for the tokens of the feature.
* The rank is used to measure how commonly the token is found in the
* dataset. The most common token would have a rank of 1, with the second-most
* common value having a rank of 2, and so on.
*
*
* .tensorflow.metadata.v0.RankHistogram rank_histogram = 7;
*/
public Builder clearRankHistogram() {
bitField0_ = (bitField0_ & ~0x00000200);
rankHistogram_ = null;
if (rankHistogramBuilder_ != null) {
rankHistogramBuilder_.dispose();
rankHistogramBuilder_ = null;
}
onChanged();
return this;
}
/**
*
* The rank histogram for the tokens of the feature.
* The rank is used to measure how commonly the token is found in the
* dataset. The most common token would have a rank of 1, with the second-most
* common value having a rank of 2, and so on.
*
*
* .tensorflow.metadata.v0.RankHistogram rank_histogram = 7;
*/
public org.tensorflow.metadata.v0.RankHistogram.Builder getRankHistogramBuilder() {
bitField0_ |= 0x00000200;
onChanged();
return getRankHistogramFieldBuilder().getBuilder();
}
/**
*
* The rank histogram for the tokens of the feature.
* The rank is used to measure how commonly the token is found in the
* dataset. The most common token would have a rank of 1, with the second-most
* common value having a rank of 2, and so on.
*
*
* .tensorflow.metadata.v0.RankHistogram rank_histogram = 7;
*/
public org.tensorflow.metadata.v0.RankHistogramOrBuilder getRankHistogramOrBuilder() {
if (rankHistogramBuilder_ != null) {
return rankHistogramBuilder_.getMessageOrBuilder();
} else {
return rankHistogram_ == null ?
org.tensorflow.metadata.v0.RankHistogram.getDefaultInstance() : rankHistogram_;
}
}
/**
*
* The rank histogram for the tokens of the feature.
* The rank is used to measure how commonly the token is found in the
* dataset. The most common token would have a rank of 1, with the second-most
* common value having a rank of 2, and so on.
*
*
* .tensorflow.metadata.v0.RankHistogram rank_histogram = 7;
*/
private com.google.protobuf.SingleFieldBuilderV3<
org.tensorflow.metadata.v0.RankHistogram, org.tensorflow.metadata.v0.RankHistogram.Builder, org.tensorflow.metadata.v0.RankHistogramOrBuilder>
getRankHistogramFieldBuilder() {
if (rankHistogramBuilder_ == null) {
rankHistogramBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
org.tensorflow.metadata.v0.RankHistogram, org.tensorflow.metadata.v0.RankHistogram.Builder, org.tensorflow.metadata.v0.RankHistogramOrBuilder>(
getRankHistogram(),
getParentForChildren(),
isClean());
rankHistogram_ = null;
}
return rankHistogramBuilder_;
}
private org.tensorflow.metadata.v0.WeightedNaturalLanguageStatistics weightedNlStatistics_;
private com.google.protobuf.SingleFieldBuilderV3<
org.tensorflow.metadata.v0.WeightedNaturalLanguageStatistics, org.tensorflow.metadata.v0.WeightedNaturalLanguageStatistics.Builder, org.tensorflow.metadata.v0.WeightedNaturalLanguageStatisticsOrBuilder> weightedNlStatisticsBuilder_;
/**
* .tensorflow.metadata.v0.WeightedNaturalLanguageStatistics weighted_nl_statistics = 8;
* @return Whether the weightedNlStatistics field is set.
*/
public boolean hasWeightedNlStatistics() {
return ((bitField0_ & 0x00000400) != 0);
}
/**
* .tensorflow.metadata.v0.WeightedNaturalLanguageStatistics weighted_nl_statistics = 8;
* @return The weightedNlStatistics.
*/
public org.tensorflow.metadata.v0.WeightedNaturalLanguageStatistics getWeightedNlStatistics() {
if (weightedNlStatisticsBuilder_ == null) {
return weightedNlStatistics_ == null ? org.tensorflow.metadata.v0.WeightedNaturalLanguageStatistics.getDefaultInstance() : weightedNlStatistics_;
} else {
return weightedNlStatisticsBuilder_.getMessage();
}
}
/**
* .tensorflow.metadata.v0.WeightedNaturalLanguageStatistics weighted_nl_statistics = 8;
*/
public Builder setWeightedNlStatistics(org.tensorflow.metadata.v0.WeightedNaturalLanguageStatistics value) {
if (weightedNlStatisticsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
weightedNlStatistics_ = value;
} else {
weightedNlStatisticsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000400;
onChanged();
return this;
}
/**
* .tensorflow.metadata.v0.WeightedNaturalLanguageStatistics weighted_nl_statistics = 8;
*/
public Builder setWeightedNlStatistics(
org.tensorflow.metadata.v0.WeightedNaturalLanguageStatistics.Builder builderForValue) {
if (weightedNlStatisticsBuilder_ == null) {
weightedNlStatistics_ = builderForValue.build();
} else {
weightedNlStatisticsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000400;
onChanged();
return this;
}
/**
* .tensorflow.metadata.v0.WeightedNaturalLanguageStatistics weighted_nl_statistics = 8;
*/
public Builder mergeWeightedNlStatistics(org.tensorflow.metadata.v0.WeightedNaturalLanguageStatistics value) {
if (weightedNlStatisticsBuilder_ == null) {
if (((bitField0_ & 0x00000400) != 0) &&
weightedNlStatistics_ != null &&
weightedNlStatistics_ != org.tensorflow.metadata.v0.WeightedNaturalLanguageStatistics.getDefaultInstance()) {
getWeightedNlStatisticsBuilder().mergeFrom(value);
} else {
weightedNlStatistics_ = value;
}
} else {
weightedNlStatisticsBuilder_.mergeFrom(value);
}
if (weightedNlStatistics_ != null) {
bitField0_ |= 0x00000400;
onChanged();
}
return this;
}
/**
* .tensorflow.metadata.v0.WeightedNaturalLanguageStatistics weighted_nl_statistics = 8;
*/
public Builder clearWeightedNlStatistics() {
bitField0_ = (bitField0_ & ~0x00000400);
weightedNlStatistics_ = null;
if (weightedNlStatisticsBuilder_ != null) {
weightedNlStatisticsBuilder_.dispose();
weightedNlStatisticsBuilder_ = null;
}
onChanged();
return this;
}
/**
* .tensorflow.metadata.v0.WeightedNaturalLanguageStatistics weighted_nl_statistics = 8;
*/
public org.tensorflow.metadata.v0.WeightedNaturalLanguageStatistics.Builder getWeightedNlStatisticsBuilder() {
bitField0_ |= 0x00000400;
onChanged();
return getWeightedNlStatisticsFieldBuilder().getBuilder();
}
/**
* .tensorflow.metadata.v0.WeightedNaturalLanguageStatistics weighted_nl_statistics = 8;
*/
public org.tensorflow.metadata.v0.WeightedNaturalLanguageStatisticsOrBuilder getWeightedNlStatisticsOrBuilder() {
if (weightedNlStatisticsBuilder_ != null) {
return weightedNlStatisticsBuilder_.getMessageOrBuilder();
} else {
return weightedNlStatistics_ == null ?
org.tensorflow.metadata.v0.WeightedNaturalLanguageStatistics.getDefaultInstance() : weightedNlStatistics_;
}
}
/**
* .tensorflow.metadata.v0.WeightedNaturalLanguageStatistics weighted_nl_statistics = 8;
*/
private com.google.protobuf.SingleFieldBuilderV3<
org.tensorflow.metadata.v0.WeightedNaturalLanguageStatistics, org.tensorflow.metadata.v0.WeightedNaturalLanguageStatistics.Builder, org.tensorflow.metadata.v0.WeightedNaturalLanguageStatisticsOrBuilder>
getWeightedNlStatisticsFieldBuilder() {
if (weightedNlStatisticsBuilder_ == null) {
weightedNlStatisticsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
org.tensorflow.metadata.v0.WeightedNaturalLanguageStatistics, org.tensorflow.metadata.v0.WeightedNaturalLanguageStatistics.Builder, org.tensorflow.metadata.v0.WeightedNaturalLanguageStatisticsOrBuilder>(
getWeightedNlStatistics(),
getParentForChildren(),
isClean());
weightedNlStatistics_ = null;
}
return weightedNlStatisticsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:tensorflow.metadata.v0.NaturalLanguageStatistics)
}
// @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.NaturalLanguageStatistics)
private static final org.tensorflow.metadata.v0.NaturalLanguageStatistics DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.tensorflow.metadata.v0.NaturalLanguageStatistics();
}
public static org.tensorflow.metadata.v0.NaturalLanguageStatistics getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<NaturalLanguageStatistics>
PARSER = new com.google.protobuf.AbstractParser<NaturalLanguageStatistics>() {
@java.lang.Override
public NaturalLanguageStatistics parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<NaturalLanguageStatistics> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<NaturalLanguageStatistics> getParserForType() {
return PARSER;
}
@java.lang.Override
public org.tensorflow.metadata.v0.NaturalLanguageStatistics getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}