
org.tensorflow.metadata.v0.Histogram

// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: tensorflow_metadata/proto/v0/statistics.proto

// Protobuf Java Version: 3.25.4
package org.tensorflow.metadata.v0;

/**
 * <pre>
 * The data used to create a histogram of a numeric feature for a dataset.
 * </pre>
 *
 * Protobuf type {@code tensorflow.metadata.v0.Histogram}
 */
public final class Histogram extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:tensorflow.metadata.v0.Histogram)
    HistogramOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use Histogram.newBuilder() to construct.
  private Histogram(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private Histogram() {
    buckets_ = java.util.Collections.emptyList();
    type_ = 0;
    name_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new Histogram();
  }

  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.tensorflow.metadata.v0.Statistics.internal_static_tensorflow_metadata_v0_Histogram_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.tensorflow.metadata.v0.Statistics.internal_static_tensorflow_metadata_v0_Histogram_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.tensorflow.metadata.v0.Histogram.class, org.tensorflow.metadata.v0.Histogram.Builder.class);
  }

  /**
   * <pre>
   * The type of the histogram. A standard histogram has equal-width buckets.
   * The quantiles type is used for when the histogram message is used to store
   * quantile information (by using approximately equal-count buckets with
   * variable widths).
   * </pre>
   *
   * Protobuf enum {@code tensorflow.metadata.v0.Histogram.HistogramType}
   */
  public enum HistogramType
      implements com.google.protobuf.ProtocolMessageEnum {
    /**
     * <code>STANDARD = 0;</code>
     */
    STANDARD(0),
    /**
     * <code>QUANTILES = 1;</code>
     */
    QUANTILES(1),
    UNRECOGNIZED(-1),
    ;

    /**
     * <code>STANDARD = 0;</code>
     */
    public static final int STANDARD_VALUE = 0;
    /**
     * <code>QUANTILES = 1;</code>
     */
    public static final int QUANTILES_VALUE = 1;

    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static HistogramType valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static HistogramType forNumber(int value) {
      switch (value) {
        case 0: return STANDARD;
        case 1: return QUANTILES;
        default: return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<HistogramType>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final com.google.protobuf.Internal.EnumLiteMap<
        HistogramType> internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<HistogramType>() {
            public HistogramType findValueByNumber(int number) {
              return HistogramType.forNumber(number);
            }
          };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.tensorflow.metadata.v0.Histogram.getDescriptor().getEnumTypes().get(0);
    }

    private static final HistogramType[] VALUES = values();

    public static HistogramType valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
            "EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private HistogramType(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:tensorflow.metadata.v0.Histogram.HistogramType)
  }

  public interface BucketOrBuilder extends
      // @@protoc_insertion_point(interface_extends:tensorflow.metadata.v0.Histogram.Bucket)
      com.google.protobuf.MessageOrBuilder {

    /**
     * <pre>
     * The low value of the bucket, exclusive except for the first bucket.
     * </pre>
     *
     * <code>double low_value = 1;</code>
     * @return The lowValue.
     */
    double getLowValue();

    /**
     * <pre>
     * The high value of the bucket, inclusive.
     * </pre>
     *
     * <code>double high_value = 2;</code>
     * @return The highValue.
     */
    double getHighValue();

    /**
     * <pre>
     * The number of items in the bucket. Stored as a double to be able to
     * handle weighted histograms.
     * </pre>
     *
     * <code>double sample_count = 4;</code>
     * @return The sampleCount.
     */
    double getSampleCount();
  }
  /**
   * <pre>
   * Each bucket defines its low and high values along with its count. The
   * low and high values must be a real number or positive or negative
   * infinity. They cannot be NaN or undefined. Counts of those special values
   * can be found in the numNaN and numUndefined fields.
   * </pre>
   *
   * Protobuf type {@code tensorflow.metadata.v0.Histogram.Bucket}
   */
  public static final class Bucket extends
      com.google.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:tensorflow.metadata.v0.Histogram.Bucket)
      BucketOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use Bucket.newBuilder() to construct.
    private Bucket(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private Bucket() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
      return new Bucket();
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.tensorflow.metadata.v0.Statistics.internal_static_tensorflow_metadata_v0_Histogram_Bucket_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.tensorflow.metadata.v0.Statistics.internal_static_tensorflow_metadata_v0_Histogram_Bucket_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.tensorflow.metadata.v0.Histogram.Bucket.class, org.tensorflow.metadata.v0.Histogram.Bucket.Builder.class);
    }

    public static final int LOW_VALUE_FIELD_NUMBER = 1;
    private double lowValue_ = 0D;
    /**
     * <pre>
     * The low value of the bucket, exclusive except for the first bucket.
     * </pre>
     *
     * <code>double low_value = 1;</code>
     * @return The lowValue.
     */
    @java.lang.Override
    public double getLowValue() {
      return lowValue_;
    }

    public static final int HIGH_VALUE_FIELD_NUMBER = 2;
    private double highValue_ = 0D;
    /**
     * <pre>
     * The high value of the bucket, inclusive.
     * </pre>
     *
     * <code>double high_value = 2;</code>
     * @return The highValue.
     */
    @java.lang.Override
    public double getHighValue() {
      return highValue_;
    }

    public static final int SAMPLE_COUNT_FIELD_NUMBER = 4;
    private double sampleCount_ = 0D;
    /**
     * <pre>
     * The number of items in the bucket. Stored as a double to be able to
     * handle weighted histograms.
     * </pre>
     *
     * <code>double sample_count = 4;</code>
     * @return The sampleCount.
     */
    @java.lang.Override
    public double getSampleCount() {
      return sampleCount_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(com.google.protobuf.CodedOutputStream output)
        throws java.io.IOException {
      if (java.lang.Double.doubleToRawLongBits(lowValue_) != 0) {
        output.writeDouble(1, lowValue_);
      }
      if (java.lang.Double.doubleToRawLongBits(highValue_) != 0) {
        output.writeDouble(2, highValue_);
      }
      if (java.lang.Double.doubleToRawLongBits(sampleCount_) != 0) {
        output.writeDouble(4, sampleCount_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (java.lang.Double.doubleToRawLongBits(lowValue_) != 0) {
        size += com.google.protobuf.CodedOutputStream.computeDoubleSize(1, lowValue_);
      }
      if (java.lang.Double.doubleToRawLongBits(highValue_) != 0) {
        size += com.google.protobuf.CodedOutputStream.computeDoubleSize(2, highValue_);
      }
      if (java.lang.Double.doubleToRawLongBits(sampleCount_) != 0) {
        size += com.google.protobuf.CodedOutputStream.computeDoubleSize(4, sampleCount_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.tensorflow.metadata.v0.Histogram.Bucket)) {
        return super.equals(obj);
      }
      org.tensorflow.metadata.v0.Histogram.Bucket other =
          (org.tensorflow.metadata.v0.Histogram.Bucket) obj;

      if (java.lang.Double.doubleToLongBits(getLowValue())
          != java.lang.Double.doubleToLongBits(other.getLowValue())) return false;
      if (java.lang.Double.doubleToLongBits(getHighValue())
          != java.lang.Double.doubleToLongBits(other.getHighValue())) return false;
      if (java.lang.Double.doubleToLongBits(getSampleCount())
          != java.lang.Double.doubleToLongBits(other.getSampleCount())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (37 * hash) + LOW_VALUE_FIELD_NUMBER;
      hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
          java.lang.Double.doubleToLongBits(getLowValue()));
      hash = (37 * hash) + HIGH_VALUE_FIELD_NUMBER;
      hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
          java.lang.Double.doubleToLongBits(getHighValue()));
      hash = (37 * hash) + SAMPLE_COUNT_FIELD_NUMBER;
      hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
          java.lang.Double.doubleToLongBits(getSampleCount()));
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.tensorflow.metadata.v0.Histogram.Bucket parseFrom(java.nio.ByteBuffer data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.tensorflow.metadata.v0.Histogram.Bucket parseFrom(
        java.nio.ByteBuffer data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.tensorflow.metadata.v0.Histogram.Bucket parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.tensorflow.metadata.v0.Histogram.Bucket parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.tensorflow.metadata.v0.Histogram.Bucket parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.tensorflow.metadata.v0.Histogram.Bucket parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.tensorflow.metadata.v0.Histogram.Bucket parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
    }
    public static org.tensorflow.metadata.v0.Histogram.Bucket parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.tensorflow.metadata.v0.Histogram.Bucket parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
    }
    public static org.tensorflow.metadata.v0.Histogram.Bucket parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.tensorflow.metadata.v0.Histogram.Bucket parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
    }
    public static org.tensorflow.metadata.v0.Histogram.Bucket parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.tensorflow.metadata.v0.Histogram.Bucket prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * <pre>
     * Each bucket defines its low and high values along with its count. The
     * low and high values must be a real number or positive or negative
     * infinity. They cannot be NaN or undefined. Counts of those special values
     * can be found in the numNaN and numUndefined fields.
     * </pre>
     *
     * Protobuf type {@code tensorflow.metadata.v0.Histogram.Bucket}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:tensorflow.metadata.v0.Histogram.Bucket)
        org.tensorflow.metadata.v0.Histogram.BucketOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.tensorflow.metadata.v0.Statistics.internal_static_tensorflow_metadata_v0_Histogram_Bucket_descriptor;
      }

      @java.lang.Override
      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.tensorflow.metadata.v0.Statistics.internal_static_tensorflow_metadata_v0_Histogram_Bucket_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.tensorflow.metadata.v0.Histogram.Bucket.class, org.tensorflow.metadata.v0.Histogram.Bucket.Builder.class);
      }

      // Construct using org.tensorflow.metadata.v0.Histogram.Bucket.newBuilder()
      private Builder() {
      }

      private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }

      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        lowValue_ = 0D;
        highValue_ = 0D;
        sampleCount_ = 0D;
        return this;
      }

      @java.lang.Override
      public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
        return org.tensorflow.metadata.v0.Statistics.internal_static_tensorflow_metadata_v0_Histogram_Bucket_descriptor;
      }

      @java.lang.Override
      public org.tensorflow.metadata.v0.Histogram.Bucket getDefaultInstanceForType() {
        return org.tensorflow.metadata.v0.Histogram.Bucket.getDefaultInstance();
      }

      @java.lang.Override
      public org.tensorflow.metadata.v0.Histogram.Bucket build() {
        org.tensorflow.metadata.v0.Histogram.Bucket result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.tensorflow.metadata.v0.Histogram.Bucket buildPartial() {
        org.tensorflow.metadata.v0.Histogram.Bucket result =
            new org.tensorflow.metadata.v0.Histogram.Bucket(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.tensorflow.metadata.v0.Histogram.Bucket result) {
        int from_bitField0_ = bitField0_;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.lowValue_ = lowValue_;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.highValue_ = highValue_;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.sampleCount_ = sampleCount_;
        }
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.tensorflow.metadata.v0.Histogram.Bucket) {
          return mergeFrom((org.tensorflow.metadata.v0.Histogram.Bucket) other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.tensorflow.metadata.v0.Histogram.Bucket other) {
        if (other == org.tensorflow.metadata.v0.Histogram.Bucket.getDefaultInstance()) return this;
        if (other.getLowValue() != 0D) {
          setLowValue(other.getLowValue());
        }
        if (other.getHighValue() != 0D) {
          setHighValue(other.getHighValue());
        }
        if (other.getSampleCount() != 0D) {
          setSampleCount(other.getSampleCount());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 9: {
                lowValue_ = input.readDouble();
                bitField0_ |= 0x00000001;
                break;
              } // case 9
              case 17: {
                highValue_ = input.readDouble();
                bitField0_ |= 0x00000002;
                break;
              } // case 17
              case 33: {
                sampleCount_ = input.readDouble();
                bitField0_ |= 0x00000004;
                break;
              } // case 33
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private double lowValue_ ;
      /**
       * <pre>
       * The low value of the bucket, exclusive except for the first bucket.
       * </pre>
       *
       * <code>double low_value = 1;</code>
       * @return The lowValue.
       */
      @java.lang.Override
      public double getLowValue() {
        return lowValue_;
      }
      /**
       * <pre>
       * The low value of the bucket, exclusive except for the first bucket.
       * </pre>
       *
       * <code>double low_value = 1;</code>
       * @param value The lowValue to set.
       * @return This builder for chaining.
       */
      public Builder setLowValue(double value) {
        lowValue_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <pre>
       * The low value of the bucket, exclusive except for the first bucket.
       * </pre>
       *
       * <code>double low_value = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearLowValue() {
        bitField0_ = (bitField0_ & ~0x00000001);
        lowValue_ = 0D;
        onChanged();
        return this;
      }

      private double highValue_ ;
      /**
       * <pre>
       * The high value of the bucket, inclusive.
       * </pre>
       *
       * <code>double high_value = 2;</code>
       * @return The highValue.
       */
      @java.lang.Override
      public double getHighValue() {
        return highValue_;
      }
      /**
       * <pre>
       * The high value of the bucket, inclusive.
       * </pre>
       *
       * <code>double high_value = 2;</code>
       * @param value The highValue to set.
       * @return This builder for chaining.
       */
      public Builder setHighValue(double value) {
        highValue_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <pre>
       * The high value of the bucket, inclusive.
       * </pre>
       *
       * <code>double high_value = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearHighValue() {
        bitField0_ = (bitField0_ & ~0x00000002);
        highValue_ = 0D;
        onChanged();
        return this;
      }

      private double sampleCount_ ;
      /**
       * <pre>
       * The number of items in the bucket. Stored as a double to be able to
       * handle weighted histograms.
       * </pre>
       *
       * <code>double sample_count = 4;</code>
       * @return The sampleCount.
       */
      @java.lang.Override
      public double getSampleCount() {
        return sampleCount_;
      }
      /**
       * <pre>
       * The number of items in the bucket. Stored as a double to be able to
       * handle weighted histograms.
       * </pre>
       *
       * <code>double sample_count = 4;</code>
       * @param value The sampleCount to set.
       * @return This builder for chaining.
       */
      public Builder setSampleCount(double value) {
        sampleCount_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <pre>
       * The number of items in the bucket. Stored as a double to be able to
       * handle weighted histograms.
       * </pre>
       *
       * <code>double sample_count = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearSampleCount() {
        bitField0_ = (bitField0_ & ~0x00000004);
        sampleCount_ = 0D;
        onChanged();
        return this;
      }

      @java.lang.Override
      public final Builder setUnknownFields(
          final com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // @@protoc_insertion_point(builder_scope:tensorflow.metadata.v0.Histogram.Bucket)
    }

    // @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.Histogram.Bucket)
    private static final org.tensorflow.metadata.v0.Histogram.Bucket DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.tensorflow.metadata.v0.Histogram.Bucket();
    }

    public static org.tensorflow.metadata.v0.Histogram.Bucket getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    private static final com.google.protobuf.Parser<Bucket>
        PARSER = new com.google.protobuf.AbstractParser<Bucket>() {
      @java.lang.Override
      public Bucket parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static com.google.protobuf.Parser<Bucket> parser() {
      return PARSER;
    }

    @java.lang.Override
    public com.google.protobuf.Parser<Bucket> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.tensorflow.metadata.v0.Histogram.Bucket getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public static final int NUM_NAN_FIELD_NUMBER = 1;
  private long numNan_ = 0L;
  /**
   * <pre>
   * The number of NaN values in the dataset.
   * </pre>
   *
   * <code>uint64 num_nan = 1;</code>
   * @return The numNan.
   */
  @java.lang.Override
  public long getNumNan() {
    return numNan_;
  }

  public static final int NUM_UNDEFINED_FIELD_NUMBER = 2;
  private long numUndefined_ = 0L;
  /**
   * <pre>
   * The number of undefined values in the dataset.
   * </pre>
   *
   * <code>uint64 num_undefined = 2;</code>
   * @return The numUndefined.
   */
  @java.lang.Override
  public long getNumUndefined() {
    return numUndefined_;
  }

  public static final int BUCKETS_FIELD_NUMBER = 3;
  @SuppressWarnings("serial")
  private java.util.List<org.tensorflow.metadata.v0.Histogram.Bucket> buckets_;
  /**
   * <pre>
   * A list of buckets in the histogram, sorted from lowest bucket to highest
   * bucket.
   * </pre>
   *
   * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
   */
  @java.lang.Override
  public java.util.List<org.tensorflow.metadata.v0.Histogram.Bucket> getBucketsList() {
    return buckets_;
  }
  /**
   * <pre>
   * A list of buckets in the histogram, sorted from lowest bucket to highest
   * bucket.
   * </pre>
   *
   * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
   */
  @java.lang.Override
  public java.util.List<? extends org.tensorflow.metadata.v0.Histogram.BucketOrBuilder>
      getBucketsOrBuilderList() {
    return buckets_;
  }
  /**
   * <pre>
   * A list of buckets in the histogram, sorted from lowest bucket to highest
   * bucket.
   * </pre>
   *
   * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
   */
  @java.lang.Override
  public int getBucketsCount() {
    return buckets_.size();
  }
  /**
   * <pre>
   * A list of buckets in the histogram, sorted from lowest bucket to highest
   * bucket.
   * </pre>
   *
   * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
   */
  @java.lang.Override
  public org.tensorflow.metadata.v0.Histogram.Bucket getBuckets(int index) {
    return buckets_.get(index);
  }
  /**
   * <pre>
   * A list of buckets in the histogram, sorted from lowest bucket to highest
   * bucket.
   * </pre>
   *
   * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
   */
  @java.lang.Override
  public org.tensorflow.metadata.v0.Histogram.BucketOrBuilder getBucketsOrBuilder(int index) {
    return buckets_.get(index);
  }

  public static final int TYPE_FIELD_NUMBER = 4;
  private int type_ = 0;
  /**
   * <pre>
   * The type of the histogram.
   * </pre>
   *
   * <code>.tensorflow.metadata.v0.Histogram.HistogramType type = 4;</code>
   * @return The enum numeric value on the wire for type.
   */
  @java.lang.Override public int getTypeValue() {
    return type_;
  }
  /**
   * <pre>
   * The type of the histogram.
   * </pre>
   *
   * <code>.tensorflow.metadata.v0.Histogram.HistogramType type = 4;</code>
   * @return The type.
   */
  @java.lang.Override
  public org.tensorflow.metadata.v0.Histogram.HistogramType getType() {
    org.tensorflow.metadata.v0.Histogram.HistogramType result =
        org.tensorflow.metadata.v0.Histogram.HistogramType.forNumber(type_);
    return result == null ? org.tensorflow.metadata.v0.Histogram.HistogramType.UNRECOGNIZED : result;
  }

  public static final int NAME_FIELD_NUMBER = 5;
  @SuppressWarnings("serial")
  private volatile java.lang.Object name_ = "";
  /**
   * <pre>
   * An optional descriptive name of the histogram, to be used for labeling.
   * </pre>
   *
   * <code>string name = 5;</code>
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * An optional descriptive name of the histogram, to be used for labeling.
   * </pre>
   *
   * <code>string name = 5;</code>
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    if (numNan_ != 0L) {
      output.writeUInt64(1, numNan_);
    }
    if (numUndefined_ != 0L) {
      output.writeUInt64(2, numUndefined_);
    }
    for (int i = 0; i < buckets_.size(); i++) {
      output.writeMessage(3, buckets_.get(i));
    }
    if (type_ != org.tensorflow.metadata.v0.Histogram.HistogramType.STANDARD.getNumber()) {
      output.writeEnum(4, type_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, name_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (numNan_ != 0L) {
      size += com.google.protobuf.CodedOutputStream.computeUInt64Size(1, numNan_);
    }
    if (numUndefined_ != 0L) {
      size += com.google.protobuf.CodedOutputStream.computeUInt64Size(2, numUndefined_);
    }
    for (int i = 0; i < buckets_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, buckets_.get(i));
    }
    if (type_ != org.tensorflow.metadata.v0.Histogram.HistogramType.STANDARD.getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(4, type_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, name_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.tensorflow.metadata.v0.Histogram)) {
      return super.equals(obj);
    }
    org.tensorflow.metadata.v0.Histogram other = (org.tensorflow.metadata.v0.Histogram) obj;

    if (getNumNan() != other.getNumNan()) return false;
    if (getNumUndefined() != other.getNumUndefined()) return false;
    if (!getBucketsList().equals(other.getBucketsList())) return false;
    if (type_ != other.type_) return false;
    if (!getName().equals(other.getName())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NUM_NAN_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getNumNan());
    hash = (37 * hash) + NUM_UNDEFINED_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getNumUndefined());
    if (getBucketsCount() > 0) {
      hash = (37 * hash) + BUCKETS_FIELD_NUMBER;
      hash = (53 * hash) + getBucketsList().hashCode();
    }
    hash = (37 * hash) + TYPE_FIELD_NUMBER;
    hash = (53 * hash) + type_;
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.tensorflow.metadata.v0.Histogram parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.tensorflow.metadata.v0.Histogram parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.tensorflow.metadata.v0.Histogram parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.tensorflow.metadata.v0.Histogram parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.tensorflow.metadata.v0.Histogram parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.tensorflow.metadata.v0.Histogram parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.tensorflow.metadata.v0.Histogram parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static org.tensorflow.metadata.v0.Histogram parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.tensorflow.metadata.v0.Histogram parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static org.tensorflow.metadata.v0.Histogram parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.tensorflow.metadata.v0.Histogram parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static org.tensorflow.metadata.v0.Histogram parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(org.tensorflow.metadata.v0.Histogram prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * The data used to create a histogram of a numeric feature for a dataset.
   * </pre>
   *
   * Protobuf type {@code tensorflow.metadata.v0.Histogram}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:tensorflow.metadata.v0.Histogram)
      org.tensorflow.metadata.v0.HistogramOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.tensorflow.metadata.v0.Statistics.internal_static_tensorflow_metadata_v0_Histogram_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.tensorflow.metadata.v0.Statistics.internal_static_tensorflow_metadata_v0_Histogram_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.tensorflow.metadata.v0.Histogram.class, org.tensorflow.metadata.v0.Histogram.Builder.class);
    }

    // Construct using org.tensorflow.metadata.v0.Histogram.newBuilder()
    private Builder() {
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      numNan_ = 0L;
      numUndefined_ = 0L;
      if (bucketsBuilder_ == null) {
        buckets_ = java.util.Collections.emptyList();
      } else {
        buckets_ = null;
        bucketsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000004);
      type_ = 0;
      name_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return org.tensorflow.metadata.v0.Statistics.internal_static_tensorflow_metadata_v0_Histogram_descriptor;
    }

    @java.lang.Override
    public org.tensorflow.metadata.v0.Histogram getDefaultInstanceForType() {
      return org.tensorflow.metadata.v0.Histogram.getDefaultInstance();
    }

    @java.lang.Override
    public org.tensorflow.metadata.v0.Histogram build() {
      org.tensorflow.metadata.v0.Histogram result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public org.tensorflow.metadata.v0.Histogram buildPartial() {
      org.tensorflow.metadata.v0.Histogram result = new org.tensorflow.metadata.v0.Histogram(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }

    private void buildPartialRepeatedFields(org.tensorflow.metadata.v0.Histogram result) {
      if (bucketsBuilder_ == null) {
        if (((bitField0_ & 0x00000004) != 0)) {
          buckets_ = java.util.Collections.unmodifiableList(buckets_);
          bitField0_ = (bitField0_ & ~0x00000004);
        }
        result.buckets_ = buckets_;
      } else {
        result.buckets_ = bucketsBuilder_.build();
      }
    }

    private void buildPartial0(org.tensorflow.metadata.v0.Histogram result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.numNan_ = numNan_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.numUndefined_ = numUndefined_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.type_ = type_;
      }
      if (((from_bitField0_ & 0x00000010) != 0)) {
        result.name_ = name_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof org.tensorflow.metadata.v0.Histogram) {
        return mergeFrom((org.tensorflow.metadata.v0.Histogram) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.tensorflow.metadata.v0.Histogram other) {
      if (other == org.tensorflow.metadata.v0.Histogram.getDefaultInstance()) return this;
      if (other.getNumNan() != 0L) {
        setNumNan(other.getNumNan());
      }
      if (other.getNumUndefined() != 0L) {
        setNumUndefined(other.getNumUndefined());
      }
      if (bucketsBuilder_ == null) {
        if (!other.buckets_.isEmpty()) {
          if (buckets_.isEmpty()) {
            buckets_ = other.buckets_;
            bitField0_ = (bitField0_ & ~0x00000004);
          } else {
            ensureBucketsIsMutable();
            buckets_.addAll(other.buckets_);
          }
          onChanged();
        }
      } else {
        if (!other.buckets_.isEmpty()) {
          if (bucketsBuilder_.isEmpty()) {
            bucketsBuilder_.dispose();
            bucketsBuilder_ = null;
            buckets_ = other.buckets_;
            bitField0_ = (bitField0_ & ~0x00000004);
            bucketsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getBucketsFieldBuilder() : null;
          } else {
            bucketsBuilder_.addAllMessages(other.buckets_);
          }
        }
      }
      if (other.type_ != 0) {
        setTypeValue(other.getTypeValue());
      }
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000010;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8: {
              numNan_ = input.readUInt64();
              bitField0_ |= 0x00000001;
              break;
            } // case 8
            case 16: {
              numUndefined_ = input.readUInt64();
              bitField0_ |= 0x00000002;
              break;
            } // case 16
            case 26: {
              org.tensorflow.metadata.v0.Histogram.Bucket m =
                  input.readMessage(
                      org.tensorflow.metadata.v0.Histogram.Bucket.parser(),
                      extensionRegistry);
              if (bucketsBuilder_ == null) {
                ensureBucketsIsMutable();
                buckets_.add(m);
              } else {
                bucketsBuilder_.addMessage(m);
              }
              break;
            } // case 26
            case 32: {
              type_ = input.readEnum();
              bitField0_ |= 0x00000008;
              break;
            } // case 32
            case 42: {
              name_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000010;
              break;
            } // case 42
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;

    private long numNan_ ;
    /**
     * <pre>
     * The number of NaN values in the dataset.
     * </pre>
     *
     * <code>uint64 num_nan = 1;</code>
     * @return The numNan.
     */
    @java.lang.Override
    public long getNumNan() {
      return numNan_;
    }
    /**
     * <pre>
     * The number of NaN values in the dataset.
     * </pre>
     *
     * <code>uint64 num_nan = 1;</code>
     * @param value The numNan to set.
     * @return This builder for chaining.
     */
    public Builder setNumNan(long value) {
      numNan_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The number of NaN values in the dataset.
     * </pre>
     *
     * <code>uint64 num_nan = 1;</code>
     * @return This builder for chaining.
     */
    public Builder clearNumNan() {
      bitField0_ = (bitField0_ & ~0x00000001);
      numNan_ = 0L;
      onChanged();
      return this;
    }

    private long numUndefined_ ;
    /**
     * <pre>
     * The number of undefined values in the dataset.
     * </pre>
     *
     * <code>uint64 num_undefined = 2;</code>
     * @return The numUndefined.
     */
    @java.lang.Override
    public long getNumUndefined() {
      return numUndefined_;
    }
    /**
     * <pre>
     * The number of undefined values in the dataset.
     * </pre>
     *
     * <code>uint64 num_undefined = 2;</code>
     * @param value The numUndefined to set.
     * @return This builder for chaining.
     */
    public Builder setNumUndefined(long value) {
      numUndefined_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The number of undefined values in the dataset.
     * </pre>
     *
     * <code>uint64 num_undefined = 2;</code>
     * @return This builder for chaining.
     */
    public Builder clearNumUndefined() {
      bitField0_ = (bitField0_ & ~0x00000002);
      numUndefined_ = 0L;
      onChanged();
      return this;
    }

    private java.util.List<org.tensorflow.metadata.v0.Histogram.Bucket> buckets_ =
        java.util.Collections.emptyList();
    private void ensureBucketsIsMutable() {
      if (!((bitField0_ & 0x00000004) != 0)) {
        buckets_ = new java.util.ArrayList<org.tensorflow.metadata.v0.Histogram.Bucket>(buckets_);
        bitField0_ |= 0x00000004;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
        org.tensorflow.metadata.v0.Histogram.Bucket,
        org.tensorflow.metadata.v0.Histogram.Bucket.Builder,
        org.tensorflow.metadata.v0.Histogram.BucketOrBuilder> bucketsBuilder_;

    /**
     * <pre>
     * A list of buckets in the histogram, sorted from lowest bucket to highest
     * bucket.
     * </pre>
     *
     * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
     */
    public java.util.List<org.tensorflow.metadata.v0.Histogram.Bucket> getBucketsList() {
      if (bucketsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(buckets_);
      } else {
        return bucketsBuilder_.getMessageList();
      }
    }
    /**
     * <pre>
     * A list of buckets in the histogram, sorted from lowest bucket to highest
     * bucket.
     * </pre>
     *
     * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
     */
    public int getBucketsCount() {
      if (bucketsBuilder_ == null) {
        return buckets_.size();
      } else {
        return bucketsBuilder_.getCount();
      }
    }
    /**
     * <pre>
     * A list of buckets in the histogram, sorted from lowest bucket to highest
     * bucket.
     * </pre>
     *
     * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
     */
    public org.tensorflow.metadata.v0.Histogram.Bucket getBuckets(int index) {
      if (bucketsBuilder_ == null) {
        return buckets_.get(index);
      } else {
        return bucketsBuilder_.getMessage(index);
      }
    }
    /**
     * <pre>
     * A list of buckets in the histogram, sorted from lowest bucket to highest
     * bucket.
     * </pre>
     *
     * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
     */
    public Builder setBuckets(int index, org.tensorflow.metadata.v0.Histogram.Bucket value) {
      if (bucketsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureBucketsIsMutable();
        buckets_.set(index, value);
        onChanged();
      } else {
        bucketsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * A list of buckets in the histogram, sorted from lowest bucket to highest
     * bucket.
     * </pre>
     *
     * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
     */
    public Builder setBuckets(
        int index, org.tensorflow.metadata.v0.Histogram.Bucket.Builder builderForValue) {
      if (bucketsBuilder_ == null) {
        ensureBucketsIsMutable();
        buckets_.set(index, builderForValue.build());
        onChanged();
      } else {
        bucketsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * A list of buckets in the histogram, sorted from lowest bucket to highest
     * bucket.
     * </pre>
     *
     * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
     */
    public Builder addBuckets(org.tensorflow.metadata.v0.Histogram.Bucket value) {
      if (bucketsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureBucketsIsMutable();
        buckets_.add(value);
        onChanged();
      } else {
        bucketsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     * <pre>
     * A list of buckets in the histogram, sorted from lowest bucket to highest
     * bucket.
     * </pre>
     *
     * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
     */
    public Builder addBuckets(int index, org.tensorflow.metadata.v0.Histogram.Bucket value) {
      if (bucketsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureBucketsIsMutable();
        buckets_.add(index, value);
        onChanged();
      } else {
        bucketsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * A list of buckets in the histogram, sorted from lowest bucket to highest
     * bucket.
     * </pre>
     *
     * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
     */
    public Builder addBuckets(org.tensorflow.metadata.v0.Histogram.Bucket.Builder builderForValue) {
      if (bucketsBuilder_ == null) {
        ensureBucketsIsMutable();
        buckets_.add(builderForValue.build());
        onChanged();
      } else {
        bucketsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * A list of buckets in the histogram, sorted from lowest bucket to highest
     * bucket.
     * </pre>
     *
     * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
     */
    public Builder addBuckets(
        int index, org.tensorflow.metadata.v0.Histogram.Bucket.Builder builderForValue) {
      if (bucketsBuilder_ == null) {
        ensureBucketsIsMutable();
        buckets_.add(index, builderForValue.build());
        onChanged();
      } else {
        bucketsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * A list of buckets in the histogram, sorted from lowest bucket to highest
     * bucket.
     * </pre>
     *
     * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
     */
    public Builder addAllBuckets(
        java.lang.Iterable<? extends org.tensorflow.metadata.v0.Histogram.Bucket> values) {
      if (bucketsBuilder_ == null) {
        ensureBucketsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, buckets_);
        onChanged();
      } else {
        bucketsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     * <pre>
     * A list of buckets in the histogram, sorted from lowest bucket to highest
     * bucket.
     * </pre>
     *
     * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
     */
    public Builder clearBuckets() {
      if (bucketsBuilder_ == null) {
        buckets_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
      } else {
        bucketsBuilder_.clear();
      }
      return this;
    }
    /**
     * <pre>
     * A list of buckets in the histogram, sorted from lowest bucket to highest
     * bucket.
     * </pre>
     *
     * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
     */
    public Builder removeBuckets(int index) {
      if (bucketsBuilder_ == null) {
        ensureBucketsIsMutable();
        buckets_.remove(index);
        onChanged();
      } else {
        bucketsBuilder_.remove(index);
      }
      return this;
    }
    /**
     * <pre>
     * A list of buckets in the histogram, sorted from lowest bucket to highest
     * bucket.
     * </pre>
     *
     * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
     */
    public org.tensorflow.metadata.v0.Histogram.Bucket.Builder getBucketsBuilder(int index) {
      return getBucketsFieldBuilder().getBuilder(index);
    }
    /**
     * <pre>
     * A list of buckets in the histogram, sorted from lowest bucket to highest
     * bucket.
     * </pre>
     *
     * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
     */
    public org.tensorflow.metadata.v0.Histogram.BucketOrBuilder getBucketsOrBuilder(int index) {
      if (bucketsBuilder_ == null) {
        return buckets_.get(index);
      } else {
        return bucketsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     * <pre>
     * A list of buckets in the histogram, sorted from lowest bucket to highest
     * bucket.
     * </pre>
     *
     * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
     */
    public java.util.List<? extends org.tensorflow.metadata.v0.Histogram.BucketOrBuilder>
        getBucketsOrBuilderList() {
      if (bucketsBuilder_ != null) {
        return bucketsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(buckets_);
      }
    }
    /**
     * <pre>
     * A list of buckets in the histogram, sorted from lowest bucket to highest
     * bucket.
     * </pre>
     *
     * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
     */
    public org.tensorflow.metadata.v0.Histogram.Bucket.Builder addBucketsBuilder() {
      return getBucketsFieldBuilder().addBuilder(
          org.tensorflow.metadata.v0.Histogram.Bucket.getDefaultInstance());
    }
    /**
     * <pre>
     * A list of buckets in the histogram, sorted from lowest bucket to highest
     * bucket.
     * </pre>
     *
     * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
     */
    public org.tensorflow.metadata.v0.Histogram.Bucket.Builder addBucketsBuilder(int index) {
      return getBucketsFieldBuilder().addBuilder(
          index, org.tensorflow.metadata.v0.Histogram.Bucket.getDefaultInstance());
    }
    /**
     * <pre>
     * A list of buckets in the histogram, sorted from lowest bucket to highest
     * bucket.
     * </pre>
     *
     * <code>repeated .tensorflow.metadata.v0.Histogram.Bucket buckets = 3;</code>
     */
    public java.util.List<org.tensorflow.metadata.v0.Histogram.Bucket.Builder>
        getBucketsBuilderList() {
      return getBucketsFieldBuilder().getBuilderList();
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
        org.tensorflow.metadata.v0.Histogram.Bucket,
        org.tensorflow.metadata.v0.Histogram.Bucket.Builder,
        org.tensorflow.metadata.v0.Histogram.BucketOrBuilder>
        getBucketsFieldBuilder() {
      if (bucketsBuilder_ == null) {
        bucketsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
            org.tensorflow.metadata.v0.Histogram.Bucket,
            org.tensorflow.metadata.v0.Histogram.Bucket.Builder,
            org.tensorflow.metadata.v0.Histogram.BucketOrBuilder>(
                buckets_,
                ((bitField0_ & 0x00000004) != 0),
                getParentForChildren(),
                isClean());
        buckets_ = null;
      }
      return bucketsBuilder_;
    }

    private int type_ = 0;
    /**
     * <pre>
     * The type of the histogram.
     * </pre>
     *
     * <code>.tensorflow.metadata.v0.Histogram.HistogramType type = 4;</code>
     * @return The enum numeric value on the wire for type.
     */
    @java.lang.Override public int getTypeValue() {
      return type_;
    }
    /**
     * <pre>
     * The type of the histogram.
     * </pre>
     *
     * <code>.tensorflow.metadata.v0.Histogram.HistogramType type = 4;</code>
     * @param value The enum numeric value on the wire for type to set.
     * @return This builder for chaining.
     */
    public Builder setTypeValue(int value) {
      type_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The type of the histogram.
     * </pre>
     *
     * <code>.tensorflow.metadata.v0.Histogram.HistogramType type = 4;</code>
     * @return The type.
     */
    @java.lang.Override
    public org.tensorflow.metadata.v0.Histogram.HistogramType getType() {
      org.tensorflow.metadata.v0.Histogram.HistogramType result =
          org.tensorflow.metadata.v0.Histogram.HistogramType.forNumber(type_);
      return result == null ? org.tensorflow.metadata.v0.Histogram.HistogramType.UNRECOGNIZED : result;
    }
    /**
     * <pre>
     * The type of the histogram.
     * </pre>
     *
     * <code>.tensorflow.metadata.v0.Histogram.HistogramType type = 4;</code>
     * @param value The type to set.
     * @return This builder for chaining.
     */
    public Builder setType(org.tensorflow.metadata.v0.Histogram.HistogramType value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000008;
      type_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The type of the histogram.
     * </pre>
     *
     * <code>.tensorflow.metadata.v0.Histogram.HistogramType type = 4;</code>
     * @return This builder for chaining.
     */
    public Builder clearType() {
      bitField0_ = (bitField0_ & ~0x00000008);
      type_ = 0;
      onChanged();
      return this;
    }

    private java.lang.Object name_ = "";
    /**
     * <pre>
     * An optional descriptive name of the histogram, to be used for labeling.
     * </pre>
     *
     * <code>string name = 5;</code>
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * An optional descriptive name of the histogram, to be used for labeling.
     * </pre>
     *
     * <code>string name = 5;</code>
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * An optional descriptive name of the histogram, to be used for labeling.
     * </pre>
     *
     * <code>string name = 5;</code>
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      name_ = value;
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * An optional descriptive name of the histogram, to be used for labeling.
     * </pre>
     *
     * <code>string name = 5;</code>
     * @return This builder for chaining.
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      bitField0_ = (bitField0_ & ~0x00000010);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * An optional descriptive name of the histogram, to be used for labeling.
     * </pre>
     *
     * <code>string name = 5;</code>
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      name_ = value;
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:tensorflow.metadata.v0.Histogram)
  }

  // @@protoc_insertion_point(class_scope:tensorflow.metadata.v0.Histogram)
  private static final org.tensorflow.metadata.v0.Histogram DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.tensorflow.metadata.v0.Histogram();
  }

  public static org.tensorflow.metadata.v0.Histogram getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<Histogram>
      PARSER = new com.google.protobuf.AbstractParser<Histogram>() {
    @java.lang.Override
    public Histogram parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static com.google.protobuf.Parser<Histogram> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<Histogram> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public org.tensorflow.metadata.v0.Histogram getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
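Usage sketch (not part of the generated file): the snippet below exercises the generated builder API end to end — building a QUANTILES histogram, serializing it to the protobuf wire format, and parsing it back. The class name HistogramExample, the histogram name, and the bucket boundaries and counts are all illustrative values, not anything defined by tensorflow-metadata.

import org.tensorflow.metadata.v0.Histogram;

public class HistogramExample {
  public static void main(String[] args) throws Exception {
    // Three approximately equal-count buckets with variable widths, which is
    // how quantile information is represented in this message.
    Histogram histogram = Histogram.newBuilder()
        .setName("latency_ms_quantiles")               // optional label; made up
        .setType(Histogram.HistogramType.QUANTILES)
        .addBuckets(Histogram.Bucket.newBuilder()
            .setLowValue(0.0).setHighValue(12.5).setSampleCount(100.0))
        .addBuckets(Histogram.Bucket.newBuilder()
            .setLowValue(12.5).setHighValue(40.0).setSampleCount(100.0))
        .addBuckets(Histogram.Bucket.newBuilder()
            .setLowValue(40.0).setHighValue(250.0).setSampleCount(100.0))
        .setNumNan(3)   // NaN values are counted here, never inside buckets
        .build();

    // Round-trip through the wire format.
    byte[] wire = histogram.toByteArray();
    Histogram parsed = Histogram.parseFrom(wire);
    System.out.println(parsed.getBucketsCount() + " buckets, type=" + parsed.getType());
  }
}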



