// NOTE(review): the two lines below are residue from the Maven-repository web page this
// file was scraped from; they are not part of the generated source. Commented out so the
// file compiles. The real fix is to regenerate this file from tensorflow/core/framework/summary.proto.
// All Downloads are FREE. Search and download functionalities are using the official Maven repository.

// org.tensorflow.framework.Summary Maven / Gradle / Ivy

// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: tensorflow/core/framework/summary.proto

package org.tensorflow.framework;

/**
 * 
 * A Summary is a set of named values to be displayed by the
 * visualizer.
 * Summaries are produced regularly during training, as controlled by
 * the "summary_interval_secs" attribute of the training operation.
 * Summaries are also produced at the end of an evaluation.
 * 
* * Protobuf type {@code tensorflow.Summary} */ public final class Summary extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:tensorflow.Summary) SummaryOrBuilder { // Use Summary.newBuilder() to construct. private Summary(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private Summary() { value_ = java.util.Collections.emptyList(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private Summary( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!input.skipField(tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { value_ = new java.util.ArrayList(); mutable_bitField0_ |= 0x00000001; } value_.add( input.readMessage(org.tensorflow.framework.Summary.Value.parser(), extensionRegistry)); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { value_ = java.util.Collections.unmodifiableList(value_); } makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_fieldAccessorTable 
.ensureFieldAccessorsInitialized( org.tensorflow.framework.Summary.class, org.tensorflow.framework.Summary.Builder.class); } public interface ImageOrBuilder extends // @@protoc_insertion_point(interface_extends:tensorflow.Summary.Image) com.google.protobuf.MessageOrBuilder { /** *
     * Dimensions of the image.
     * 
* * int32 height = 1; */ int getHeight(); /** * int32 width = 2; */ int getWidth(); /** *
     * Valid colorspace values are
     *   1 - grayscale
     *   2 - grayscale + alpha
     *   3 - RGB
     *   4 - RGBA
     *   5 - DIGITAL_YUV
     *   6 - BGRA
     * 
* * int32 colorspace = 3; */ int getColorspace(); /** *
     * Image data in encoded format.  All image formats supported by
     * image_codec::CoderUtil can be stored here.
     * 
* * bytes encoded_image_string = 4; */ com.google.protobuf.ByteString getEncodedImageString(); } /** * Protobuf type {@code tensorflow.Summary.Image} */ public static final class Image extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:tensorflow.Summary.Image) ImageOrBuilder { // Use Image.newBuilder() to construct. private Image(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private Image() { height_ = 0; width_ = 0; colorspace_ = 0; encodedImageString_ = com.google.protobuf.ByteString.EMPTY; } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private Image( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!input.skipField(tag)) { done = true; } break; } case 8: { height_ = input.readInt32(); break; } case 16: { width_ = input.readInt32(); break; } case 24: { colorspace_ = input.readInt32(); break; } case 34: { encodedImageString_ = input.readBytes(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_Image_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_Image_fieldAccessorTable .ensureFieldAccessorsInitialized( org.tensorflow.framework.Summary.Image.class, org.tensorflow.framework.Summary.Image.Builder.class); } public static final int HEIGHT_FIELD_NUMBER = 1; private int height_; /** *
     * Dimensions of the image.
     * 
* * int32 height = 1; */ public int getHeight() { return height_; } public static final int WIDTH_FIELD_NUMBER = 2; private int width_; /** * int32 width = 2; */ public int getWidth() { return width_; } public static final int COLORSPACE_FIELD_NUMBER = 3; private int colorspace_; /** *
     * Valid colorspace values are
     *   1 - grayscale
     *   2 - grayscale + alpha
     *   3 - RGB
     *   4 - RGBA
     *   5 - DIGITAL_YUV
     *   6 - BGRA
     * 
* * int32 colorspace = 3; */ public int getColorspace() { return colorspace_; } public static final int ENCODED_IMAGE_STRING_FIELD_NUMBER = 4; private com.google.protobuf.ByteString encodedImageString_; /** *
     * Image data in encoded format.  All image formats supported by
     * image_codec::CoderUtil can be stored here.
     * 
* * bytes encoded_image_string = 4; */ public com.google.protobuf.ByteString getEncodedImageString() { return encodedImageString_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (height_ != 0) { output.writeInt32(1, height_); } if (width_ != 0) { output.writeInt32(2, width_); } if (colorspace_ != 0) { output.writeInt32(3, colorspace_); } if (!encodedImageString_.isEmpty()) { output.writeBytes(4, encodedImageString_); } } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (height_ != 0) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(1, height_); } if (width_ != 0) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(2, width_); } if (colorspace_ != 0) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(3, colorspace_); } if (!encodedImageString_.isEmpty()) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(4, encodedImageString_); } memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.tensorflow.framework.Summary.Image)) { return super.equals(obj); } org.tensorflow.framework.Summary.Image other = (org.tensorflow.framework.Summary.Image) obj; boolean result = true; result = result && (getHeight() == other.getHeight()); result = result && (getWidth() == other.getWidth()); result = result && (getColorspace() == other.getColorspace()); result = result && getEncodedImageString() .equals(other.getEncodedImageString()); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return 
memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + HEIGHT_FIELD_NUMBER; hash = (53 * hash) + getHeight(); hash = (37 * hash) + WIDTH_FIELD_NUMBER; hash = (53 * hash) + getWidth(); hash = (37 * hash) + COLORSPACE_FIELD_NUMBER; hash = (53 * hash) + getColorspace(); hash = (37 * hash) + ENCODED_IMAGE_STRING_FIELD_NUMBER; hash = (53 * hash) + getEncodedImageString().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.tensorflow.framework.Summary.Image parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.tensorflow.framework.Summary.Image parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.tensorflow.framework.Summary.Image parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.tensorflow.framework.Summary.Image parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.tensorflow.framework.Summary.Image parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.tensorflow.framework.Summary.Image parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.tensorflow.framework.Summary.Image parseFrom(java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.tensorflow.framework.Summary.Image parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.tensorflow.framework.Summary.Image parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.tensorflow.framework.Summary.Image parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.tensorflow.framework.Summary.Image parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.tensorflow.framework.Summary.Image parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.tensorflow.framework.Summary.Image prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code tensorflow.Summary.Image} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:tensorflow.Summary.Image) org.tensorflow.framework.Summary.ImageOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_Image_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_Image_fieldAccessorTable .ensureFieldAccessorsInitialized( org.tensorflow.framework.Summary.Image.class, org.tensorflow.framework.Summary.Image.Builder.class); } // Construct using org.tensorflow.framework.Summary.Image.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); height_ = 0; width_ = 0; colorspace_ = 0; encodedImageString_ = com.google.protobuf.ByteString.EMPTY; return this; } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_Image_descriptor; } public org.tensorflow.framework.Summary.Image getDefaultInstanceForType() { return org.tensorflow.framework.Summary.Image.getDefaultInstance(); } public org.tensorflow.framework.Summary.Image build() { org.tensorflow.framework.Summary.Image result = 
buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.tensorflow.framework.Summary.Image buildPartial() { org.tensorflow.framework.Summary.Image result = new org.tensorflow.framework.Summary.Image(this); result.height_ = height_; result.width_ = width_; result.colorspace_ = colorspace_; result.encodedImageString_ = encodedImageString_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.tensorflow.framework.Summary.Image) { return mergeFrom((org.tensorflow.framework.Summary.Image)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.tensorflow.framework.Summary.Image other) { if (other == org.tensorflow.framework.Summary.Image.getDefaultInstance()) return this; if (other.getHeight() != 0) { setHeight(other.getHeight()); } if (other.getWidth() != 0) { setWidth(other.getWidth()); } if (other.getColorspace() != 0) { setColorspace(other.getColorspace()); } if (other.getEncodedImageString() != com.google.protobuf.ByteString.EMPTY) { setEncodedImageString(other.getEncodedImageString()); } onChanged(); return this; } public 
final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.tensorflow.framework.Summary.Image parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.tensorflow.framework.Summary.Image) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int height_ ; /** *
       * Dimensions of the image.
       * 
* * int32 height = 1; */ public int getHeight() { return height_; } /** *
       * Dimensions of the image.
       * 
* * int32 height = 1; */ public Builder setHeight(int value) { height_ = value; onChanged(); return this; } /** *
       * Dimensions of the image.
       * 
* * int32 height = 1; */ public Builder clearHeight() { height_ = 0; onChanged(); return this; } private int width_ ; /** * int32 width = 2; */ public int getWidth() { return width_; } /** * int32 width = 2; */ public Builder setWidth(int value) { width_ = value; onChanged(); return this; } /** * int32 width = 2; */ public Builder clearWidth() { width_ = 0; onChanged(); return this; } private int colorspace_ ; /** *
       * Valid colorspace values are
       *   1 - grayscale
       *   2 - grayscale + alpha
       *   3 - RGB
       *   4 - RGBA
       *   5 - DIGITAL_YUV
       *   6 - BGRA
       * 
* * int32 colorspace = 3; */ public int getColorspace() { return colorspace_; } /** *
       * Valid colorspace values are
       *   1 - grayscale
       *   2 - grayscale + alpha
       *   3 - RGB
       *   4 - RGBA
       *   5 - DIGITAL_YUV
       *   6 - BGRA
       * 
* * int32 colorspace = 3; */ public Builder setColorspace(int value) { colorspace_ = value; onChanged(); return this; } /** *
       * Valid colorspace values are
       *   1 - grayscale
       *   2 - grayscale + alpha
       *   3 - RGB
       *   4 - RGBA
       *   5 - DIGITAL_YUV
       *   6 - BGRA
       * 
* * int32 colorspace = 3; */ public Builder clearColorspace() { colorspace_ = 0; onChanged(); return this; } private com.google.protobuf.ByteString encodedImageString_ = com.google.protobuf.ByteString.EMPTY; /** *
       * Image data in encoded format.  All image formats supported by
       * image_codec::CoderUtil can be stored here.
       * 
* * bytes encoded_image_string = 4; */ public com.google.protobuf.ByteString getEncodedImageString() { return encodedImageString_; } /** *
       * Image data in encoded format.  All image formats supported by
       * image_codec::CoderUtil can be stored here.
       * 
* * bytes encoded_image_string = 4; */ public Builder setEncodedImageString(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } encodedImageString_ = value; onChanged(); return this; } /** *
       * Image data in encoded format.  All image formats supported by
       * image_codec::CoderUtil can be stored here.
       * 
* * bytes encoded_image_string = 4; */ public Builder clearEncodedImageString() { encodedImageString_ = getDefaultInstance().getEncodedImageString(); onChanged(); return this; } public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return this; } public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return this; } // @@protoc_insertion_point(builder_scope:tensorflow.Summary.Image) } // @@protoc_insertion_point(class_scope:tensorflow.Summary.Image) private static final org.tensorflow.framework.Summary.Image DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.tensorflow.framework.Summary.Image(); } public static org.tensorflow.framework.Summary.Image getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser PARSER = new com.google.protobuf.AbstractParser() { public Image parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new Image(input, extensionRegistry); } }; public static com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser getParserForType() { return PARSER; } public org.tensorflow.framework.Summary.Image getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface AudioOrBuilder extends // @@protoc_insertion_point(interface_extends:tensorflow.Summary.Audio) com.google.protobuf.MessageOrBuilder { /** *
     * Sample rate of the audio in Hz.
     * 
* * float sample_rate = 1; */ float getSampleRate(); /** *
     * Number of channels of audio.
     * 
* * int64 num_channels = 2; */ long getNumChannels(); /** *
     * Length of the audio in frames (samples per channel).
     * 
* * int64 length_frames = 3; */ long getLengthFrames(); /** *
     * Encoded audio data and its associated RFC 2045 content type (e.g.
     * "audio/wav").
     * 
* * bytes encoded_audio_string = 4; */ com.google.protobuf.ByteString getEncodedAudioString(); /** * string content_type = 5; */ java.lang.String getContentType(); /** * string content_type = 5; */ com.google.protobuf.ByteString getContentTypeBytes(); } /** * Protobuf type {@code tensorflow.Summary.Audio} */ public static final class Audio extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:tensorflow.Summary.Audio) AudioOrBuilder { // Use Audio.newBuilder() to construct. private Audio(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private Audio() { sampleRate_ = 0F; numChannels_ = 0L; lengthFrames_ = 0L; encodedAudioString_ = com.google.protobuf.ByteString.EMPTY; contentType_ = ""; } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private Audio( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!input.skipField(tag)) { done = true; } break; } case 13: { sampleRate_ = input.readFloat(); break; } case 16: { numChannels_ = input.readInt64(); break; } case 24: { lengthFrames_ = input.readInt64(); break; } case 34: { encodedAudioString_ = input.readBytes(); break; } case 42: { java.lang.String s = input.readStringRequireUtf8(); contentType_ = s; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { 
return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_Audio_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_Audio_fieldAccessorTable .ensureFieldAccessorsInitialized( org.tensorflow.framework.Summary.Audio.class, org.tensorflow.framework.Summary.Audio.Builder.class); } public static final int SAMPLE_RATE_FIELD_NUMBER = 1; private float sampleRate_; /** *
     * Sample rate of the audio in Hz.
     * 
* * float sample_rate = 1; */ public float getSampleRate() { return sampleRate_; } public static final int NUM_CHANNELS_FIELD_NUMBER = 2; private long numChannels_; /** *
     * Number of channels of audio.
     * 
* * int64 num_channels = 2; */ public long getNumChannels() { return numChannels_; } public static final int LENGTH_FRAMES_FIELD_NUMBER = 3; private long lengthFrames_; /** *
     * Length of the audio in frames (samples per channel).
     * 
* * int64 length_frames = 3; */ public long getLengthFrames() { return lengthFrames_; } public static final int ENCODED_AUDIO_STRING_FIELD_NUMBER = 4; private com.google.protobuf.ByteString encodedAudioString_; /** *
     * Encoded audio data and its associated RFC 2045 content type (e.g.
     * "audio/wav").
     * 
* * bytes encoded_audio_string = 4; */ public com.google.protobuf.ByteString getEncodedAudioString() { return encodedAudioString_; } public static final int CONTENT_TYPE_FIELD_NUMBER = 5; private volatile java.lang.Object contentType_; /** * string content_type = 5; */ public java.lang.String getContentType() { java.lang.Object ref = contentType_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); contentType_ = s; return s; } } /** * string content_type = 5; */ public com.google.protobuf.ByteString getContentTypeBytes() { java.lang.Object ref = contentType_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); contentType_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (sampleRate_ != 0F) { output.writeFloat(1, sampleRate_); } if (numChannels_ != 0L) { output.writeInt64(2, numChannels_); } if (lengthFrames_ != 0L) { output.writeInt64(3, lengthFrames_); } if (!encodedAudioString_.isEmpty()) { output.writeBytes(4, encodedAudioString_); } if (!getContentTypeBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, contentType_); } } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (sampleRate_ != 0F) { size += com.google.protobuf.CodedOutputStream .computeFloatSize(1, sampleRate_); } if (numChannels_ != 0L) { size += com.google.protobuf.CodedOutputStream .computeInt64Size(2, numChannels_); } if 
(lengthFrames_ != 0L) { size += com.google.protobuf.CodedOutputStream .computeInt64Size(3, lengthFrames_); } if (!encodedAudioString_.isEmpty()) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(4, encodedAudioString_); } if (!getContentTypeBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, contentType_); } memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.tensorflow.framework.Summary.Audio)) { return super.equals(obj); } org.tensorflow.framework.Summary.Audio other = (org.tensorflow.framework.Summary.Audio) obj; boolean result = true; result = result && ( java.lang.Float.floatToIntBits(getSampleRate()) == java.lang.Float.floatToIntBits( other.getSampleRate())); result = result && (getNumChannels() == other.getNumChannels()); result = result && (getLengthFrames() == other.getLengthFrames()); result = result && getEncodedAudioString() .equals(other.getEncodedAudioString()); result = result && getContentType() .equals(other.getContentType()); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + SAMPLE_RATE_FIELD_NUMBER; hash = (53 * hash) + java.lang.Float.floatToIntBits( getSampleRate()); hash = (37 * hash) + NUM_CHANNELS_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong( getNumChannels()); hash = (37 * hash) + LENGTH_FRAMES_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong( getLengthFrames()); hash = (37 * hash) + ENCODED_AUDIO_STRING_FIELD_NUMBER; hash = (53 * hash) + getEncodedAudioString().hashCode(); hash = (37 * hash) + CONTENT_TYPE_FIELD_NUMBER; hash = (53 * hash) + getContentType().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); 
memoizedHashCode = hash; return hash; } public static org.tensorflow.framework.Summary.Audio parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.tensorflow.framework.Summary.Audio parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.tensorflow.framework.Summary.Audio parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.tensorflow.framework.Summary.Audio parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.tensorflow.framework.Summary.Audio parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.tensorflow.framework.Summary.Audio parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.tensorflow.framework.Summary.Audio parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.tensorflow.framework.Summary.Audio parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.tensorflow.framework.Summary.Audio parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.tensorflow.framework.Summary.Audio parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.tensorflow.framework.Summary.Audio parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.tensorflow.framework.Summary.Audio parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.tensorflow.framework.Summary.Audio prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code tensorflow.Summary.Audio} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:tensorflow.Summary.Audio) org.tensorflow.framework.Summary.AudioOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_Audio_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_Audio_fieldAccessorTable .ensureFieldAccessorsInitialized( org.tensorflow.framework.Summary.Audio.class, org.tensorflow.framework.Summary.Audio.Builder.class); } // Construct using org.tensorflow.framework.Summary.Audio.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); sampleRate_ = 0F; numChannels_ = 0L; lengthFrames_ = 0L; encodedAudioString_ = com.google.protobuf.ByteString.EMPTY; contentType_ = ""; return this; } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_Audio_descriptor; } public org.tensorflow.framework.Summary.Audio getDefaultInstanceForType() { return org.tensorflow.framework.Summary.Audio.getDefaultInstance(); } public org.tensorflow.framework.Summary.Audio build() { 
// Builder.build() completes below: buildPartial() copies each builder field
// (sampleRate_, numChannels_, lengthFrames_, encodedAudioString_, contentType_)
// into a freshly constructed Audio. mergeFrom(Audio) copies only fields whose
// value differs from the proto3 default (0F, 0L, ByteString.EMPTY, empty string).
// mergeFrom(CodedInputStream) parses via PARSER and, in the finally block, merges
// whatever was parsed even when an exception is rethrown (partial-merge
// semantics). isInitialized() is unconditionally true — proto3 has no required
// fields. NOTE(review): protoc-generated ("DO NOT EDIT!" per file header);
// regenerate from summary.proto rather than editing by hand.
org.tensorflow.framework.Summary.Audio result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.tensorflow.framework.Summary.Audio buildPartial() { org.tensorflow.framework.Summary.Audio result = new org.tensorflow.framework.Summary.Audio(this); result.sampleRate_ = sampleRate_; result.numChannels_ = numChannels_; result.lengthFrames_ = lengthFrames_; result.encodedAudioString_ = encodedAudioString_; result.contentType_ = contentType_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.tensorflow.framework.Summary.Audio) { return mergeFrom((org.tensorflow.framework.Summary.Audio)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.tensorflow.framework.Summary.Audio other) { if (other == org.tensorflow.framework.Summary.Audio.getDefaultInstance()) return this; if (other.getSampleRate() != 0F) { setSampleRate(other.getSampleRate()); } if (other.getNumChannels() != 0L) { setNumChannels(other.getNumChannels()); } if (other.getLengthFrames() != 0L) { setLengthFrames(other.getLengthFrames()); } if 
(other.getEncodedAudioString() != com.google.protobuf.ByteString.EMPTY) { setEncodedAudioString(other.getEncodedAudioString()); } if (!other.getContentType().isEmpty()) { contentType_ = other.contentType_; onChanged(); } onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.tensorflow.framework.Summary.Audio parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.tensorflow.framework.Summary.Audio) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private float sampleRate_ ; /** *
       * Sample rate of the audio in Hz.
       *
       * float sample_rate = 1;
       */ public float getSampleRate() { return sampleRate_; } /** *
       * Sample rate of the audio in Hz.
       *
       * float sample_rate = 1;
       */ public Builder setSampleRate(float value) { sampleRate_ = value; onChanged(); return this; } /** *
       * Sample rate of the audio in Hz.
       *
       * float sample_rate = 1;
       */ public Builder clearSampleRate() { sampleRate_ = 0F; onChanged(); return this; } private long numChannels_ ; /** *
       * Number of channels of audio.
       *
       * int64 num_channels = 2;
       */ public long getNumChannels() { return numChannels_; } /** *
       * Number of channels of audio.
       *
       * int64 num_channels = 2;
       */ public Builder setNumChannels(long value) { numChannels_ = value; onChanged(); return this; } /** *
       * Number of channels of audio.
       *
       * int64 num_channels = 2;
       */ public Builder clearNumChannels() { numChannels_ = 0L; onChanged(); return this; } private long lengthFrames_ ; /** *
       * Length of the audio in frames (samples per channel).
       *
       * int64 length_frames = 3;
       */ public long getLengthFrames() { return lengthFrames_; } /** *
       * Length of the audio in frames (samples per channel).
       *
       * int64 length_frames = 3;
       */ public Builder setLengthFrames(long value) { lengthFrames_ = value; onChanged(); return this; } /** *
       * Length of the audio in frames (samples per channel).
       *
       * int64 length_frames = 3;
       */ public Builder clearLengthFrames() { lengthFrames_ = 0L; onChanged(); return this; } private com.google.protobuf.ByteString encodedAudioString_ = com.google.protobuf.ByteString.EMPTY; /** *
       * Encoded audio data and its associated RFC 2045 content type (e.g.
       * "audio/wav").
       *
       * bytes encoded_audio_string = 4;
       */ public com.google.protobuf.ByteString getEncodedAudioString() { return encodedAudioString_; } /** *
       * Encoded audio data and its associated RFC 2045 content type (e.g.
       * "audio/wav").
       *
       * bytes encoded_audio_string = 4;
       */ public Builder setEncodedAudioString(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } encodedAudioString_ = value; onChanged(); return this; } /** *
       * Encoded audio data and its associated RFC 2045 content type (e.g.
       * "audio/wav").
       *
* * bytes encoded_audio_string = 4; */ public Builder clearEncodedAudioString() { encodedAudioString_ = getDefaultInstance().getEncodedAudioString(); onChanged(); return this; } private java.lang.Object contentType_ = ""; /** * string content_type = 5; */ public java.lang.String getContentType() { java.lang.Object ref = contentType_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); contentType_ = s; return s; } else { return (java.lang.String) ref; } } /** * string content_type = 5; */ public com.google.protobuf.ByteString getContentTypeBytes() { java.lang.Object ref = contentType_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); contentType_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * string content_type = 5; */ public Builder setContentType( java.lang.String value) { if (value == null) { throw new NullPointerException(); } contentType_ = value; onChanged(); return this; } /** * string content_type = 5; */ public Builder clearContentType() { contentType_ = getDefaultInstance().getContentType(); onChanged(); return this; } /** * string content_type = 5; */ public Builder setContentTypeBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); contentType_ = value; onChanged(); return this; } public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return this; } public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return this; } // @@protoc_insertion_point(builder_scope:tensorflow.Summary.Audio) } // @@protoc_insertion_point(class_scope:tensorflow.Summary.Audio) private static final org.tensorflow.framework.Summary.Audio DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new 
// NOTE(review): extraction damage in this span, beyond repair from what is
// visible. The declaration below is Audio's "private static final
// com.google.protobuf.Parser" (generic arguments such as Parser<Audio> were
// stripped by the HTML scrape), yet the initializer that follows parses and
// returns the OUTER Summary type ("new Summary(input, extensionRegistry)") and
// the final getDefaultInstanceForType() returns org.tensorflow.framework.Summary
// while DEFAULT_INSTANCE above is a Summary.Audio — the types do not line up.
// The generated source between Audio's parser declaration and Summary's trailing
// parser section (Audio's parser()/getParserForType(), the close of the Audio
// class, the Summary.Value nested type, Summary's fields, serialization and
// Builder) appears to have been dropped by the scrape. Do not hand-patch:
// regenerate from tensorflow/core/framework/summary.proto with protoc and
// compare.
org.tensorflow.framework.Summary.Audio(); } public static org.tensorflow.framework.Summary.Audio getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser
PARSER = new com.google.protobuf.AbstractParser() { public Summary parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new Summary(input, extensionRegistry); } }; public static com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser getParserForType() { return PARSER; } public org.tensorflow.framework.Summary getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }




© 2015 - 2024 Weber Informatics LLC | Privacy Policy