
com.hederahashgraph.api.proto.java.RunningHashes Maven / Gradle / Ivy

// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: state/blockrecords/running_hashes.proto

package com.hederahashgraph.api.proto.java;

/**
 * <pre>
 * The running hash of transaction records and the previous 3 running hashes.
 * All hashes are 48-byte SHA-384 hashes. If the running hashes do not exist
 * yet, they default to an empty bytes object (a zero-length byte array).
 * </pre>
* * Protobuf type {@code proto.RunningHashes} */ public final class RunningHashes extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:proto.RunningHashes) RunningHashesOrBuilder { private static final long serialVersionUID = 0L; // Use RunningHashes.newBuilder() to construct. private RunningHashes(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } private RunningHashes() { runningHash_ = com.google.protobuf.ByteString.EMPTY; nMinus1RunningHash_ = com.google.protobuf.ByteString.EMPTY; nMinus2RunningHash_ = com.google.protobuf.ByteString.EMPTY; nMinus3RunningHash_ = com.google.protobuf.ByteString.EMPTY; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new RunningHashes(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RunningHashes( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { runningHash_ = input.readBytes(); break; } case 18: { nMinus1RunningHash_ = input.readBytes(); break; } case 26: { nMinus2RunningHash_ = input.readBytes(); break; } case 34: { nMinus3RunningHash_ = input.readBytes(); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.hederahashgraph.api.proto.java.RunningHashesOuterClass.internal_static_proto_RunningHashes_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.hederahashgraph.api.proto.java.RunningHashesOuterClass.internal_static_proto_RunningHashes_fieldAccessorTable .ensureFieldAccessorsInitialized( com.hederahashgraph.api.proto.java.RunningHashes.class, com.hederahashgraph.api.proto.java.RunningHashes.Builder.class); } public static final int RUNNING_HASH_FIELD_NUMBER = 1; private com.google.protobuf.ByteString runningHash_; /** *
   * <pre>
   * A running hash of all record stream items
   * </pre>
* * bytes running_hash = 1; * @return The runningHash. */ @java.lang.Override public com.google.protobuf.ByteString getRunningHash() { return runningHash_; } public static final int N_MINUS_1_RUNNING_HASH_FIELD_NUMBER = 2; private com.google.protobuf.ByteString nMinus1RunningHash_; /** *
   * <pre>
   * The previous running hash of all record stream items
   * </pre>
* * bytes n_minus_1_running_hash = 2; * @return The nMinus1RunningHash. */ @java.lang.Override public com.google.protobuf.ByteString getNMinus1RunningHash() { return nMinus1RunningHash_; } public static final int N_MINUS_2_RUNNING_HASH_FIELD_NUMBER = 3; private com.google.protobuf.ByteString nMinus2RunningHash_; /** *
   * <pre>
   * The previous, previous running hash of all record stream items
   * </pre>
* * bytes n_minus_2_running_hash = 3; * @return The nMinus2RunningHash. */ @java.lang.Override public com.google.protobuf.ByteString getNMinus2RunningHash() { return nMinus2RunningHash_; } public static final int N_MINUS_3_RUNNING_HASH_FIELD_NUMBER = 4; private com.google.protobuf.ByteString nMinus3RunningHash_; /** *
   * <pre>
   * The previous, previous, previous running hash of all record stream items
   * </pre>
* * bytes n_minus_3_running_hash = 4; * @return The nMinus3RunningHash. */ @java.lang.Override public com.google.protobuf.ByteString getNMinus3RunningHash() { return nMinus3RunningHash_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!runningHash_.isEmpty()) { output.writeBytes(1, runningHash_); } if (!nMinus1RunningHash_.isEmpty()) { output.writeBytes(2, nMinus1RunningHash_); } if (!nMinus2RunningHash_.isEmpty()) { output.writeBytes(3, nMinus2RunningHash_); } if (!nMinus3RunningHash_.isEmpty()) { output.writeBytes(4, nMinus3RunningHash_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!runningHash_.isEmpty()) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, runningHash_); } if (!nMinus1RunningHash_.isEmpty()) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, nMinus1RunningHash_); } if (!nMinus2RunningHash_.isEmpty()) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, nMinus2RunningHash_); } if (!nMinus3RunningHash_.isEmpty()) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(4, nMinus3RunningHash_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.hederahashgraph.api.proto.java.RunningHashes)) { return super.equals(obj); } com.hederahashgraph.api.proto.java.RunningHashes other = (com.hederahashgraph.api.proto.java.RunningHashes) obj; if (!getRunningHash() .equals(other.getRunningHash())) return false; if (!getNMinus1RunningHash() .equals(other.getNMinus1RunningHash())) return false; if (!getNMinus2RunningHash() .equals(other.getNMinus2RunningHash())) return false; if (!getNMinus3RunningHash() .equals(other.getNMinus3RunningHash())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + RUNNING_HASH_FIELD_NUMBER; hash = (53 * hash) + getRunningHash().hashCode(); hash = (37 * hash) + N_MINUS_1_RUNNING_HASH_FIELD_NUMBER; hash = (53 * hash) + getNMinus1RunningHash().hashCode(); hash = (37 * hash) + N_MINUS_2_RUNNING_HASH_FIELD_NUMBER; hash = (53 * hash) + getNMinus2RunningHash().hashCode(); hash = (37 * hash) + N_MINUS_3_RUNNING_HASH_FIELD_NUMBER; hash = (53 * hash) + getNMinus3RunningHash().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.hederahashgraph.api.proto.java.RunningHashes parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.hederahashgraph.api.proto.java.RunningHashes parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.hederahashgraph.api.proto.java.RunningHashes 
parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.hederahashgraph.api.proto.java.RunningHashes parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.hederahashgraph.api.proto.java.RunningHashes parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.hederahashgraph.api.proto.java.RunningHashes parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.hederahashgraph.api.proto.java.RunningHashes parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.hederahashgraph.api.proto.java.RunningHashes parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.hederahashgraph.api.proto.java.RunningHashes parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.hederahashgraph.api.proto.java.RunningHashes parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.hederahashgraph.api.proto.java.RunningHashes parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.hederahashgraph.api.proto.java.RunningHashes parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.hederahashgraph.api.proto.java.RunningHashes prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** *
   * <pre>
   * The running hash of transaction records and the previous 3 running hashes.
   * All hashes are 48-byte SHA-384 hashes. If the running hashes do not exist
   * yet, they default to an empty bytes object (a zero-length byte array).
   * </pre>
* * Protobuf type {@code proto.RunningHashes} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements // @@protoc_insertion_point(builder_implements:proto.RunningHashes) com.hederahashgraph.api.proto.java.RunningHashesOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.hederahashgraph.api.proto.java.RunningHashesOuterClass.internal_static_proto_RunningHashes_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.hederahashgraph.api.proto.java.RunningHashesOuterClass.internal_static_proto_RunningHashes_fieldAccessorTable .ensureFieldAccessorsInitialized( com.hederahashgraph.api.proto.java.RunningHashes.class, com.hederahashgraph.api.proto.java.RunningHashes.Builder.class); } // Construct using com.hederahashgraph.api.proto.java.RunningHashes.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); runningHash_ = com.google.protobuf.ByteString.EMPTY; nMinus1RunningHash_ = com.google.protobuf.ByteString.EMPTY; nMinus2RunningHash_ = com.google.protobuf.ByteString.EMPTY; nMinus3RunningHash_ = com.google.protobuf.ByteString.EMPTY; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.hederahashgraph.api.proto.java.RunningHashesOuterClass.internal_static_proto_RunningHashes_descriptor; } @java.lang.Override public com.hederahashgraph.api.proto.java.RunningHashes getDefaultInstanceForType() { return com.hederahashgraph.api.proto.java.RunningHashes.getDefaultInstance(); } @java.lang.Override public com.hederahashgraph.api.proto.java.RunningHashes build() { com.hederahashgraph.api.proto.java.RunningHashes result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.hederahashgraph.api.proto.java.RunningHashes buildPartial() { com.hederahashgraph.api.proto.java.RunningHashes result = new com.hederahashgraph.api.proto.java.RunningHashes(this); result.runningHash_ = runningHash_; result.nMinus1RunningHash_ = nMinus1RunningHash_; result.nMinus2RunningHash_ = nMinus2RunningHash_; result.nMinus3RunningHash_ = nMinus3RunningHash_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } 
@java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.hederahashgraph.api.proto.java.RunningHashes) { return mergeFrom((com.hederahashgraph.api.proto.java.RunningHashes)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.hederahashgraph.api.proto.java.RunningHashes other) { if (other == com.hederahashgraph.api.proto.java.RunningHashes.getDefaultInstance()) return this; if (other.getRunningHash() != com.google.protobuf.ByteString.EMPTY) { setRunningHash(other.getRunningHash()); } if (other.getNMinus1RunningHash() != com.google.protobuf.ByteString.EMPTY) { setNMinus1RunningHash(other.getNMinus1RunningHash()); } if (other.getNMinus2RunningHash() != com.google.protobuf.ByteString.EMPTY) { setNMinus2RunningHash(other.getNMinus2RunningHash()); } if (other.getNMinus3RunningHash() != com.google.protobuf.ByteString.EMPTY) { setNMinus3RunningHash(other.getNMinus3RunningHash()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.hederahashgraph.api.proto.java.RunningHashes parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.hederahashgraph.api.proto.java.RunningHashes) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private com.google.protobuf.ByteString runningHash_ = com.google.protobuf.ByteString.EMPTY; /** *
      * <pre>
      * A running hash of all record stream items
      * </pre>
* * bytes running_hash = 1; * @return The runningHash. */ @java.lang.Override public com.google.protobuf.ByteString getRunningHash() { return runningHash_; } /** *
      * <pre>
      * A running hash of all record stream items
      * </pre>
* * bytes running_hash = 1; * @param value The runningHash to set. * @return This builder for chaining. */ public Builder setRunningHash(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } runningHash_ = value; onChanged(); return this; } /** *
      * <pre>
      * A running hash of all record stream items
      * </pre>
* * bytes running_hash = 1; * @return This builder for chaining. */ public Builder clearRunningHash() { runningHash_ = getDefaultInstance().getRunningHash(); onChanged(); return this; } private com.google.protobuf.ByteString nMinus1RunningHash_ = com.google.protobuf.ByteString.EMPTY; /** *
      * <pre>
      * The previous running hash of all record stream items
      * </pre>
* * bytes n_minus_1_running_hash = 2; * @return The nMinus1RunningHash. */ @java.lang.Override public com.google.protobuf.ByteString getNMinus1RunningHash() { return nMinus1RunningHash_; } /** *
      * <pre>
      * The previous running hash of all record stream items
      * </pre>
* * bytes n_minus_1_running_hash = 2; * @param value The nMinus1RunningHash to set. * @return This builder for chaining. */ public Builder setNMinus1RunningHash(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } nMinus1RunningHash_ = value; onChanged(); return this; } /** *
      * <pre>
      * The previous running hash of all record stream items
      * </pre>
* * bytes n_minus_1_running_hash = 2; * @return This builder for chaining. */ public Builder clearNMinus1RunningHash() { nMinus1RunningHash_ = getDefaultInstance().getNMinus1RunningHash(); onChanged(); return this; } private com.google.protobuf.ByteString nMinus2RunningHash_ = com.google.protobuf.ByteString.EMPTY; /** *
      * <pre>
      * The previous, previous running hash of all record stream items
      * </pre>
* * bytes n_minus_2_running_hash = 3; * @return The nMinus2RunningHash. */ @java.lang.Override public com.google.protobuf.ByteString getNMinus2RunningHash() { return nMinus2RunningHash_; } /** *
      * <pre>
      * The previous, previous running hash of all record stream items
      * </pre>
* * bytes n_minus_2_running_hash = 3; * @param value The nMinus2RunningHash to set. * @return This builder for chaining. */ public Builder setNMinus2RunningHash(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } nMinus2RunningHash_ = value; onChanged(); return this; } /** *
      * <pre>
      * The previous, previous running hash of all record stream items
      * </pre>
* * bytes n_minus_2_running_hash = 3; * @return This builder for chaining. */ public Builder clearNMinus2RunningHash() { nMinus2RunningHash_ = getDefaultInstance().getNMinus2RunningHash(); onChanged(); return this; } private com.google.protobuf.ByteString nMinus3RunningHash_ = com.google.protobuf.ByteString.EMPTY; /** *
      * <pre>
      * The previous, previous, previous running hash of all record stream items
      * </pre>
* * bytes n_minus_3_running_hash = 4; * @return The nMinus3RunningHash. */ @java.lang.Override public com.google.protobuf.ByteString getNMinus3RunningHash() { return nMinus3RunningHash_; } /** *
      * <pre>
      * The previous, previous, previous running hash of all record stream items
      * </pre>
* * bytes n_minus_3_running_hash = 4; * @param value The nMinus3RunningHash to set. * @return This builder for chaining. */ public Builder setNMinus3RunningHash(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } nMinus3RunningHash_ = value; onChanged(); return this; } /** *
      * <pre>
      * The previous, previous, previous running hash of all record stream items
      * </pre>
* * bytes n_minus_3_running_hash = 4; * @return This builder for chaining. */ public Builder clearNMinus3RunningHash() { nMinus3RunningHash_ = getDefaultInstance().getNMinus3RunningHash(); onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:proto.RunningHashes) } // @@protoc_insertion_point(class_scope:proto.RunningHashes) private static final com.hederahashgraph.api.proto.java.RunningHashes DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.hederahashgraph.api.proto.java.RunningHashes(); } public static com.hederahashgraph.api.proto.java.RunningHashes getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser PARSER = new com.google.protobuf.AbstractParser() { @java.lang.Override public RunningHashes parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new RunningHashes(input, extensionRegistry); } }; public static com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override public com.hederahashgraph.api.proto.java.RunningHashes getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
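
For readers browsing this artifact, here is a minimal, hypothetical sketch of how the generated RunningHashes API above might be used to build, serialize, and re-parse a message. The class name RunningHashesExample and the zero-filled 48-byte placeholder value are illustrative assumptions, not part of the artifact; only the newBuilder/set.../build/toByteArray/parseFrom calls shown in the listing are relied on.

import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import com.hederahashgraph.api.proto.java.RunningHashes;

public final class RunningHashesExample {
    public static void main(String[] args) throws InvalidProtocolBufferException {
        // Illustrative 48-byte value standing in for a real SHA-384 digest.
        ByteString placeholderHash = ByteString.copyFrom(new byte[48]);

        RunningHashes hashes = RunningHashes.newBuilder()
            .setRunningHash(placeholderHash)
            .setNMinus1RunningHash(placeholderHash)
            .build();

        // Round-trip through the wire format using the generated parse method.
        byte[] wireBytes = hashes.toByteArray();
        RunningHashes decoded = RunningHashes.parseFrom(wireBytes);
        System.out.println(decoded.getRunningHash().size()); // prints 48
    }
}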
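
The four fields form a sliding window over the last four running hashes. As a rough illustration only, the sketch below rotates that window when a new record stream item arrives and derives the new running hash by feeding the previous running hash and the serialized item to SHA-384. The helper name advance and the exact concatenation scheme are assumptions made for this example; they are not the consensus node's actual record stream algorithm.

import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import com.google.protobuf.ByteString;
import com.hederahashgraph.api.proto.java.RunningHashes;

public final class RunningHashesRotation {

    // Hypothetical helper: fold one serialized record stream item into the
    // running-hash window. Hashing the previous running hash followed by the
    // item bytes is an illustrative assumption, not the node's exact scheme.
    public static RunningHashes advance(RunningHashes current, byte[] serializedItem)
            throws NoSuchAlgorithmException {
        MessageDigest sha384 = MessageDigest.getInstance("SHA-384"); // 48-byte digest
        sha384.update(current.getRunningHash().toByteArray());
        sha384.update(serializedItem);
        ByteString newRunningHash = ByteString.copyFrom(sha384.digest());

        // Shift the window: n becomes n-1, n-1 becomes n-2, n-2 becomes n-3.
        return RunningHashes.newBuilder()
            .setRunningHash(newRunningHash)
            .setNMinus1RunningHash(current.getRunningHash())
            .setNMinus2RunningHash(current.getNMinus1RunningHash())
            .setNMinus3RunningHash(current.getNMinus2RunningHash())
            .build();
    }

    private RunningHashesRotation() {}
}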



