// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: tensorflow/core/framework/reader_base.proto
package org.tensorflow.framework;
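// For reference, the wire format handled by this class corresponds to a proto3
// message of roughly this shape (editor's sketch reconstructed from the field
// accessors below; see tensorflow/core/framework/reader_base.proto for the
// authoritative definition):
//
//   message ReaderBaseState {
//     int64 work_started = 1;
//     int64 work_finished = 2;
//     int64 num_records_produced = 3;
//     bytes current_work = 4;
//   }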
/**
* <pre>
* For serializing and restoring the state of ReaderBase, see
* reader_base.h for details.
* </pre>
*
* Protobuf type {@code tensorflow.ReaderBaseState}
*/
public final class ReaderBaseState extends
com.github.os72.protobuf351.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:tensorflow.ReaderBaseState)
ReaderBaseStateOrBuilder {
private static final long serialVersionUID = 0L;
// Use ReaderBaseState.newBuilder() to construct.
private ReaderBaseState(com.github.os72.protobuf351.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ReaderBaseState() {
workStarted_ = 0L;
workFinished_ = 0L;
numRecordsProduced_ = 0L;
currentWork_ = com.github.os72.protobuf351.ByteString.EMPTY;
}
@java.lang.Override
public final com.github.os72.protobuf351.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
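// Parsing constructor: consumes tag/value pairs from the CodedInputStream
// until tag 0 (end of input). Tags 8, 16 and 24 are the varint-encoded int64
// fields work_started, work_finished and num_records_produced (field numbers
// 1-3); tag 34 is the length-delimited bytes field current_work (field
// number 4). Unrecognized tags are preserved in unknownFields.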
private ReaderBaseState(
com.github.os72.protobuf351.CodedInputStream input,
com.github.os72.protobuf351.ExtensionRegistryLite extensionRegistry)
throws com.github.os72.protobuf351.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.github.os72.protobuf351.UnknownFieldSet.Builder unknownFields =
com.github.os72.protobuf351.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownFieldProto3(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
workStarted_ = input.readInt64();
break;
}
case 16: {
workFinished_ = input.readInt64();
break;
}
case 24: {
numRecordsProduced_ = input.readInt64();
break;
}
case 34: {
currentWork_ = input.readBytes();
break;
}
}
}
} catch (com.github.os72.protobuf351.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.github.os72.protobuf351.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.github.os72.protobuf351.Descriptors.Descriptor
getDescriptor() {
return org.tensorflow.framework.ReaderBaseProtos.internal_static_tensorflow_ReaderBaseState_descriptor;
}
protected com.github.os72.protobuf351.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.tensorflow.framework.ReaderBaseProtos.internal_static_tensorflow_ReaderBaseState_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.tensorflow.framework.ReaderBaseState.class, org.tensorflow.framework.ReaderBaseState.Builder.class);
}
public static final int WORK_STARTED_FIELD_NUMBER = 1;
private long workStarted_;
/**
* int64 work_started = 1;
*/
public long getWorkStarted() {
return workStarted_;
}
public static final int WORK_FINISHED_FIELD_NUMBER = 2;
private long workFinished_;
/**
* int64 work_finished = 2;
*/
public long getWorkFinished() {
return workFinished_;
}
public static final int NUM_RECORDS_PRODUCED_FIELD_NUMBER = 3;
private long numRecordsProduced_;
/**
* int64 num_records_produced = 3;
*/
public long getNumRecordsProduced() {
return numRecordsProduced_;
}
public static final int CURRENT_WORK_FIELD_NUMBER = 4;
private com.github.os72.protobuf351.ByteString currentWork_;
/**
* bytes current_work = 4;
*/
public com.github.os72.protobuf351.ByteString getCurrentWork() {
return currentWork_;
}
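// proto3 messages have no required fields, so isInitialized() always resolves
// to true; memoizedIsInitialized caches that result (-1 = not yet computed).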
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
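// Proto3 serialization: fields equal to their default value (0 for int64,
// empty for bytes) are skipped, so an all-default message encodes to zero
// bytes apart from any unknown fields.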
public void writeTo(com.github.os72.protobuf351.CodedOutputStream output)
throws java.io.IOException {
if (workStarted_ != 0L) {
output.writeInt64(1, workStarted_);
}
if (workFinished_ != 0L) {
output.writeInt64(2, workFinished_);
}
if (numRecordsProduced_ != 0L) {
output.writeInt64(3, numRecordsProduced_);
}
if (!currentWork_.isEmpty()) {
output.writeBytes(4, currentWork_);
}
unknownFields.writeTo(output);
}
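// The serialized size is computed lazily and cached in the inherited
// memoizedSize field; -1 marks "not yet computed".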
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (workStarted_ != 0L) {
size += com.github.os72.protobuf351.CodedOutputStream
.computeInt64Size(1, workStarted_);
}
if (workFinished_ != 0L) {
size += com.github.os72.protobuf351.CodedOutputStream
.computeInt64Size(2, workFinished_);
}
if (numRecordsProduced_ != 0L) {
size += com.github.os72.protobuf351.CodedOutputStream
.computeInt64Size(3, numRecordsProduced_);
}
if (!currentWork_.isEmpty()) {
size += com.github.os72.protobuf351.CodedOutputStream
.computeBytesSize(4, currentWork_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.tensorflow.framework.ReaderBaseState)) {
return super.equals(obj);
}
org.tensorflow.framework.ReaderBaseState other = (org.tensorflow.framework.ReaderBaseState) obj;
boolean result = true;
result = result && (getWorkStarted()
== other.getWorkStarted());
result = result && (getWorkFinished()
== other.getWorkFinished());
result = result && (getNumRecordsProduced()
== other.getNumRecordsProduced());
result = result && getCurrentWork()
.equals(other.getCurrentWork());
result = result && unknownFields.equals(other.unknownFields);
return result;
}
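// hashCode() mixes the descriptor, each field number with its value, and the
// unknown field set, caching the result in the inherited memoizedHashCode
// field (0 means "not yet computed").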
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + WORK_STARTED_FIELD_NUMBER;
hash = (53 * hash) + com.github.os72.protobuf351.Internal.hashLong(
getWorkStarted());
hash = (37 * hash) + WORK_FINISHED_FIELD_NUMBER;
hash = (53 * hash) + com.github.os72.protobuf351.Internal.hashLong(
getWorkFinished());
hash = (37 * hash) + NUM_RECORDS_PRODUCED_FIELD_NUMBER;
hash = (53 * hash) + com.github.os72.protobuf351.Internal.hashLong(
getNumRecordsProduced());
hash = (37 * hash) + CURRENT_WORK_FIELD_NUMBER;
hash = (53 * hash) + getCurrentWork().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
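// Static parseFrom/parseDelimitedFrom overloads delegate to PARSER for each
// supported input representation (ByteBuffer, ByteString, byte[], InputStream,
// CodedInputStream), each with and without an ExtensionRegistryLite.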
public static org.tensorflow.framework.ReaderBaseState parseFrom(
java.nio.ByteBuffer data)
throws com.github.os72.protobuf351.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.framework.ReaderBaseState parseFrom(
java.nio.ByteBuffer data,
com.github.os72.protobuf351.ExtensionRegistryLite extensionRegistry)
throws com.github.os72.protobuf351.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.framework.ReaderBaseState parseFrom(
com.github.os72.protobuf351.ByteString data)
throws com.github.os72.protobuf351.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.framework.ReaderBaseState parseFrom(
com.github.os72.protobuf351.ByteString data,
com.github.os72.protobuf351.ExtensionRegistryLite extensionRegistry)
throws com.github.os72.protobuf351.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.framework.ReaderBaseState parseFrom(byte[] data)
throws com.github.os72.protobuf351.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.framework.ReaderBaseState parseFrom(
byte[] data,
com.github.os72.protobuf351.ExtensionRegistryLite extensionRegistry)
throws com.github.os72.protobuf351.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.framework.ReaderBaseState parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.github.os72.protobuf351.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.tensorflow.framework.ReaderBaseState parseFrom(
java.io.InputStream input,
com.github.os72.protobuf351.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.github.os72.protobuf351.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.framework.ReaderBaseState parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.github.os72.protobuf351.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.tensorflow.framework.ReaderBaseState parseDelimitedFrom(
java.io.InputStream input,
com.github.os72.protobuf351.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.github.os72.protobuf351.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.framework.ReaderBaseState parseFrom(
com.github.os72.protobuf351.CodedInputStream input)
throws java.io.IOException {
return com.github.os72.protobuf351.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.tensorflow.framework.ReaderBaseState parseFrom(
com.github.os72.protobuf351.CodedInputStream input,
com.github.os72.protobuf351.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.github.os72.protobuf351.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.tensorflow.framework.ReaderBaseState prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.github.os72.protobuf351.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* For serializing and restoring the state of ReaderBase, see
* reader_base.h for details.
* </pre>
*
* Protobuf type {@code tensorflow.ReaderBaseState}
*/
public static final class Builder extends
com.github.os72.protobuf351.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:tensorflow.ReaderBaseState)
org.tensorflow.framework.ReaderBaseStateOrBuilder {
public static final com.github.os72.protobuf351.Descriptors.Descriptor
getDescriptor() {
return org.tensorflow.framework.ReaderBaseProtos.internal_static_tensorflow_ReaderBaseState_descriptor;
}
protected com.github.os72.protobuf351.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.tensorflow.framework.ReaderBaseProtos.internal_static_tensorflow_ReaderBaseState_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.tensorflow.framework.ReaderBaseState.class, org.tensorflow.framework.ReaderBaseState.Builder.class);
}
// Construct using org.tensorflow.framework.ReaderBaseState.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.github.os72.protobuf351.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.github.os72.protobuf351.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
public Builder clear() {
super.clear();
workStarted_ = 0L;
workFinished_ = 0L;
numRecordsProduced_ = 0L;
currentWork_ = com.github.os72.protobuf351.ByteString.EMPTY;
return this;
}
public com.github.os72.protobuf351.Descriptors.Descriptor
getDescriptorForType() {
return org.tensorflow.framework.ReaderBaseProtos.internal_static_tensorflow_ReaderBaseState_descriptor;
}
public org.tensorflow.framework.ReaderBaseState getDefaultInstanceForType() {
return org.tensorflow.framework.ReaderBaseState.getDefaultInstance();
}
public org.tensorflow.framework.ReaderBaseState build() {
org.tensorflow.framework.ReaderBaseState result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.tensorflow.framework.ReaderBaseState buildPartial() {
org.tensorflow.framework.ReaderBaseState result = new org.tensorflow.framework.ReaderBaseState(this);
result.workStarted_ = workStarted_;
result.workFinished_ = workFinished_;
result.numRecordsProduced_ = numRecordsProduced_;
result.currentWork_ = currentWork_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.github.os72.protobuf351.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.github.os72.protobuf351.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.github.os72.protobuf351.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.github.os72.protobuf351.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.github.os72.protobuf351.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(com.github.os72.protobuf351.Message other) {
if (other instanceof org.tensorflow.framework.ReaderBaseState) {
return mergeFrom((org.tensorflow.framework.ReaderBaseState)other);
} else {
super.mergeFrom(other);
return this;
}
}
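// Proto3 merge semantics: a scalar field from `other` overwrites this
// builder's value only when it carries a non-default value.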
public Builder mergeFrom(org.tensorflow.framework.ReaderBaseState other) {
if (other == org.tensorflow.framework.ReaderBaseState.getDefaultInstance()) return this;
if (other.getWorkStarted() != 0L) {
setWorkStarted(other.getWorkStarted());
}
if (other.getWorkFinished() != 0L) {
setWorkFinished(other.getWorkFinished());
}
if (other.getNumRecordsProduced() != 0L) {
setNumRecordsProduced(other.getNumRecordsProduced());
}
if (other.getCurrentWork() != com.github.os72.protobuf351.ByteString.EMPTY) {
setCurrentWork(other.getCurrentWork());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.github.os72.protobuf351.CodedInputStream input,
com.github.os72.protobuf351.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.tensorflow.framework.ReaderBaseState parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.github.os72.protobuf351.InvalidProtocolBufferException e) {
parsedMessage = (org.tensorflow.framework.ReaderBaseState) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private long workStarted_ ;
/**
* int64 work_started = 1;
*/
public long getWorkStarted() {
return workStarted_;
}
/**
* int64 work_started = 1;
*/
public Builder setWorkStarted(long value) {
workStarted_ = value;
onChanged();
return this;
}
/**
* int64 work_started = 1;
*/
public Builder clearWorkStarted() {
workStarted_ = 0L;
onChanged();
return this;
}
private long workFinished_ ;
/**
* int64 work_finished = 2;
*/
public long getWorkFinished() {
return workFinished_;
}
/**
* int64 work_finished = 2;
*/
public Builder setWorkFinished(long value) {
workFinished_ = value;
onChanged();
return this;
}
/**
* int64 work_finished = 2;
*/
public Builder clearWorkFinished() {
workFinished_ = 0L;
onChanged();
return this;
}
private long numRecordsProduced_ ;
/**
* int64 num_records_produced = 3;
*/
public long getNumRecordsProduced() {
return numRecordsProduced_;
}
/**
* int64 num_records_produced = 3;
*/
public Builder setNumRecordsProduced(long value) {
numRecordsProduced_ = value;
onChanged();
return this;
}
/**
* int64 num_records_produced = 3;
*/
public Builder clearNumRecordsProduced() {
numRecordsProduced_ = 0L;
onChanged();
return this;
}
private com.github.os72.protobuf351.ByteString currentWork_ = com.github.os72.protobuf351.ByteString.EMPTY;
/**
* bytes current_work = 4;
*/
public com.github.os72.protobuf351.ByteString getCurrentWork() {
return currentWork_;
}
/**
* bytes current_work = 4;
*/
public Builder setCurrentWork(com.github.os72.protobuf351.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
currentWork_ = value;
onChanged();
return this;
}
/**
* bytes current_work = 4;
*/
public Builder clearCurrentWork() {
currentWork_ = getDefaultInstance().getCurrentWork();
onChanged();
return this;
}
public final Builder setUnknownFields(
final com.github.os72.protobuf351.UnknownFieldSet unknownFields) {
return super.setUnknownFieldsProto3(unknownFields);
}
public final Builder mergeUnknownFields(
final com.github.os72.protobuf351.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:tensorflow.ReaderBaseState)
}
// @@protoc_insertion_point(class_scope:tensorflow.ReaderBaseState)
private static final org.tensorflow.framework.ReaderBaseState DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.tensorflow.framework.ReaderBaseState();
}
public static org.tensorflow.framework.ReaderBaseState getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.github.os72.protobuf351.Parser<ReaderBaseState>
PARSER = new com.github.os72.protobuf351.AbstractParser<ReaderBaseState>() {
public ReaderBaseState parsePartialFrom(
com.github.os72.protobuf351.CodedInputStream input,
com.github.os72.protobuf351.ExtensionRegistryLite extensionRegistry)
throws com.github.os72.protobuf351.InvalidProtocolBufferException {
return new ReaderBaseState(input, extensionRegistry);
}
};
public static com.github.os72.protobuf351.Parser<ReaderBaseState> parser() {
return PARSER;
}
@java.lang.Override
public com.github.os72.protobuf351.Parser<ReaderBaseState> getParserForType() {
return PARSER;
}
public org.tensorflow.framework.ReaderBaseState getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
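// Usage sketch (editor's addition, not emitted by protoc): round-tripping a
// ReaderBaseState through its wire form. Every call below is a method defined
// or inherited by this generated class or the bundled protobuf runtime.
//
//   ReaderBaseState state = ReaderBaseState.newBuilder()
//       .setWorkStarted(3L)
//       .setNumRecordsProduced(42L)
//       .setCurrentWork(
//           com.github.os72.protobuf351.ByteString.copyFromUtf8("shard-00003"))
//       .build();
//   byte[] wire = state.toByteArray();                  // proto3 wire format
//   ReaderBaseState restored = ReaderBaseState.parseFrom(wire);
//   assert restored.equals(state);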