
// Code generated by Wire protocol buffer compiler, do not edit.
// Source: app.cash.backfila.protos.clientservice.RunBatchResponse in app/cash/backfila/client_service.proto
package app.cash.backfila.protos.clientservice;
import com.squareup.wire.FieldEncoding;
import com.squareup.wire.Message;
import com.squareup.wire.ProtoAdapter;
import com.squareup.wire.ProtoReader;
import com.squareup.wire.ProtoWriter;
import com.squareup.wire.ReverseProtoWriter;
import com.squareup.wire.Syntax;
import com.squareup.wire.WireField;
import com.squareup.wire.internal.Internal;
import java.io.IOException;
import java.lang.Long;
import java.lang.Object;
import java.lang.Override;
import java.lang.String;
import java.lang.StringBuilder;
import okio.ByteString;

public final class RunBatchResponse extends Message<RunBatchResponse, RunBatchResponse.Builder> {
  public static final ProtoAdapter<RunBatchResponse> ADAPTER = new ProtoAdapter_RunBatchResponse();

  private static final long serialVersionUID = 0L;

  public static final Long DEFAULT_BACKOFF_MS = 0L;

  public static final String DEFAULT_EXCEPTION_STACK_TRACE = "";

  /**
   * How long to sleep before starting new batches. For example, this can be set to the replication lag.
   */
  @WireField(
      tag = 1,
      adapter = "com.squareup.wire.ProtoAdapter#UINT64"
  )
  public final Long backoff_ms;

  @WireField(
      tag = 2,
      adapter = "app.cash.backfila.protos.clientservice.PipelinedData#ADAPTER"
  )
  public final PipelinedData pipelined_data;

  /**
   * Stack trace that will be surfaced to the user.
   * If this is provided, the batch request is assumed to have failed.
   */
  @WireField(
      tag = 3,
      adapter = "com.squareup.wire.ProtoAdapter#STRING"
  )
  public final String exception_stack_trace;

  /**
   * Data provided by the Backfila client telling Backfila what part of the batch has not been
   * completed yet. Backfila will resume with a new request containing either the remaining range
   * or the original range.
   * Backfila clients use this to relay partial progress, for example on time-consuming batches.
   */
  @WireField(
      tag = 8,
      adapter = "app.cash.backfila.protos.clientservice.KeyRange#ADAPTER"
  )
  public final KeyRange remaining_batch_range;
  public RunBatchResponse(Long backoff_ms, PipelinedData pipelined_data,
      String exception_stack_trace, KeyRange remaining_batch_range) {
    this(backoff_ms, pipelined_data, exception_stack_trace, remaining_batch_range, ByteString.EMPTY);
  }

  public RunBatchResponse(Long backoff_ms, PipelinedData pipelined_data,
      String exception_stack_trace, KeyRange remaining_batch_range, ByteString unknownFields) {
    super(ADAPTER, unknownFields);
    this.backoff_ms = backoff_ms;
    this.pipelined_data = pipelined_data;
    this.exception_stack_trace = exception_stack_trace;
    this.remaining_batch_range = remaining_batch_range;
  }

  @Override
  public Builder newBuilder() {
    Builder builder = new Builder();
    builder.backoff_ms = backoff_ms;
    builder.pipelined_data = pipelined_data;
    builder.exception_stack_trace = exception_stack_trace;
    builder.remaining_batch_range = remaining_batch_range;
    builder.addUnknownFields(unknownFields());
    return builder;
  }

  @Override
  public boolean equals(Object other) {
    if (other == this) return true;
    if (!(other instanceof RunBatchResponse)) return false;
    RunBatchResponse o = (RunBatchResponse) other;
    return unknownFields().equals(o.unknownFields())
        && Internal.equals(backoff_ms, o.backoff_ms)
        && Internal.equals(pipelined_data, o.pipelined_data)
        && Internal.equals(exception_stack_trace, o.exception_stack_trace)
        && Internal.equals(remaining_batch_range, o.remaining_batch_range);
  }

  @Override
  public int hashCode() {
    int result = super.hashCode;
    if (result == 0) {
      result = unknownFields().hashCode();
      result = result * 37 + (backoff_ms != null ? backoff_ms.hashCode() : 0);
      result = result * 37 + (pipelined_data != null ? pipelined_data.hashCode() : 0);
      result = result * 37 + (exception_stack_trace != null ? exception_stack_trace.hashCode() : 0);
      result = result * 37 + (remaining_batch_range != null ? remaining_batch_range.hashCode() : 0);
      super.hashCode = result;
    }
    return result;
  }

  @Override
  public String toString() {
    StringBuilder builder = new StringBuilder();
    if (backoff_ms != null) builder.append(", backoff_ms=").append(backoff_ms);
    if (pipelined_data != null) builder.append(", pipelined_data=").append(pipelined_data);
    if (exception_stack_trace != null) builder.append(", exception_stack_trace=").append(Internal.sanitize(exception_stack_trace));
    if (remaining_batch_range != null) builder.append(", remaining_batch_range=").append(remaining_batch_range);
    return builder.replace(0, 2, "RunBatchResponse{").append('}').toString();
  }
  public static final class Builder extends Message.Builder<RunBatchResponse, Builder> {
    public Long backoff_ms;

    public PipelinedData pipelined_data;

    public String exception_stack_trace;

    public KeyRange remaining_batch_range;

    public Builder() {
    }

    /**
     * How long to sleep before starting new batches. For example, this can be set to the replication lag.
     */
    public Builder backoff_ms(Long backoff_ms) {
      this.backoff_ms = backoff_ms;
      return this;
    }

    public Builder pipelined_data(PipelinedData pipelined_data) {
      this.pipelined_data = pipelined_data;
      return this;
    }

    /**
     * Stack trace that will be surfaced to the user.
     * If this is provided, the batch request is assumed to have failed.
     */
    public Builder exception_stack_trace(String exception_stack_trace) {
      this.exception_stack_trace = exception_stack_trace;
      return this;
    }

    /**
     * Data provided by the Backfila client telling Backfila what part of the batch has not been
     * completed yet. Backfila will resume with a new request containing either the remaining range
     * or the original range.
     * Backfila clients use this to relay partial progress, for example on time-consuming batches.
     */
    public Builder remaining_batch_range(KeyRange remaining_batch_range) {
      this.remaining_batch_range = remaining_batch_range;
      return this;
    }

    @Override
    public RunBatchResponse build() {
      return new RunBatchResponse(backoff_ms, pipelined_data, exception_stack_trace, remaining_batch_range, super.buildUnknownFields());
    }
  }
  private static final class ProtoAdapter_RunBatchResponse extends ProtoAdapter<RunBatchResponse> {
    public ProtoAdapter_RunBatchResponse() {
      super(FieldEncoding.LENGTH_DELIMITED, RunBatchResponse.class, "type.googleapis.com/app.cash.backfila.protos.clientservice.RunBatchResponse", Syntax.PROTO_2, null, "app/cash/backfila/client_service.proto");
    }

    @Override
    public int encodedSize(RunBatchResponse value) {
      int result = 0;
      result += ProtoAdapter.UINT64.encodedSizeWithTag(1, value.backoff_ms);
      result += PipelinedData.ADAPTER.encodedSizeWithTag(2, value.pipelined_data);
      result += ProtoAdapter.STRING.encodedSizeWithTag(3, value.exception_stack_trace);
      result += KeyRange.ADAPTER.encodedSizeWithTag(8, value.remaining_batch_range);
      result += value.unknownFields().size();
      return result;
    }

    @Override
    public void encode(ProtoWriter writer, RunBatchResponse value) throws IOException {
      ProtoAdapter.UINT64.encodeWithTag(writer, 1, value.backoff_ms);
      PipelinedData.ADAPTER.encodeWithTag(writer, 2, value.pipelined_data);
      ProtoAdapter.STRING.encodeWithTag(writer, 3, value.exception_stack_trace);
      KeyRange.ADAPTER.encodeWithTag(writer, 8, value.remaining_batch_range);
      writer.writeBytes(value.unknownFields());
    }

    @Override
    public void encode(ReverseProtoWriter writer, RunBatchResponse value) throws IOException {
      writer.writeBytes(value.unknownFields());
      KeyRange.ADAPTER.encodeWithTag(writer, 8, value.remaining_batch_range);
      ProtoAdapter.STRING.encodeWithTag(writer, 3, value.exception_stack_trace);
      PipelinedData.ADAPTER.encodeWithTag(writer, 2, value.pipelined_data);
      ProtoAdapter.UINT64.encodeWithTag(writer, 1, value.backoff_ms);
    }

    @Override
    public RunBatchResponse decode(ProtoReader reader) throws IOException {
      Builder builder = new Builder();
      long token = reader.beginMessage();
      for (int tag; (tag = reader.nextTag()) != -1;) {
        switch (tag) {
          case 1: builder.backoff_ms(ProtoAdapter.UINT64.decode(reader)); break;
          case 2: builder.pipelined_data(PipelinedData.ADAPTER.decode(reader)); break;
          case 3: builder.exception_stack_trace(ProtoAdapter.STRING.decode(reader)); break;
          case 8: builder.remaining_batch_range(KeyRange.ADAPTER.decode(reader)); break;
          default: {
            reader.readUnknownField(tag);
          }
        }
      }
      builder.addUnknownFields(reader.endMessageAndGetUnknownFields(token));
      return builder.build();
    }

    @Override
    public RunBatchResponse redact(RunBatchResponse value) {
      Builder builder = value.newBuilder();
      if (builder.pipelined_data != null) builder.pipelined_data = PipelinedData.ADAPTER.redact(builder.pipelined_data);
      if (builder.remaining_batch_range != null) builder.remaining_batch_range = KeyRange.ADAPTER.redact(builder.remaining_batch_range);
      builder.clearUnknownFields();
      return builder.build();
    }
  }
}
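
// Usage sketch (not part of the Wire-generated output): one way a Backfila client backend might
// build a RunBatchResponse with the generated Builder and round-trip it through ADAPTER. The
// class name and field values below are hypothetical examples, not values mandated by Backfila.
class RunBatchResponseUsageSketch {
  static RunBatchResponse successWithBackoff() {
    // Ask Backfila to wait 500 ms before dispatching the next batch, e.g. to let replicas catch up.
    return new RunBatchResponse.Builder()
        .backoff_ms(500L)
        .build();
  }

  static byte[] roundTrip(RunBatchResponse response) throws IOException {
    // Encode with the generated proto adapter; decoding the bytes yields an equal message.
    byte[] bytes = RunBatchResponse.ADAPTER.encode(response);
    RunBatchResponse decoded = RunBatchResponse.ADAPTER.decode(bytes);
    assert decoded.equals(response);
    return bytes;
  }
}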