
/**
* Autogenerated by Avro
*
* DO NOT EDIT DIRECTLY
*/
package org.radarcns.kafka;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.specific.SpecificData;
import org.apache.avro.util.Utf8;
import org.apache.avro.message.BinaryMessageEncoder;
import org.apache.avro.message.BinaryMessageDecoder;
import org.apache.avro.message.SchemaStore;
/** Key of an aggregated stream. The stream will work with time windows. */
@org.apache.avro.specific.AvroGenerated
public class AggregateKey extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
private static final long serialVersionUID = 5764202605633051350L;
public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"AggregateKey\",\"namespace\":\"org.radarcns.kafka\",\"doc\":\"Key of an aggregated stream. The stream will work with time windows.\",\"fields\":[{\"name\":\"projectId\",\"type\":[\"null\",{\"type\":\"string\",\"avro.java.string\":\"String\"}],\"doc\":\"Project that the key belongs to.\",\"default\":null},{\"name\":\"userId\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"},\"doc\":\"User Identifier created during the enrolment.\"},{\"name\":\"sourceId\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"},\"doc\":\"Unique identifier associated with the source.\"},{\"name\":\"timeStart\",\"type\":\"double\",\"doc\":\"Time (seconds since the UNIX Epoch) of the time window start.\"},{\"name\":\"timeEnd\",\"type\":\"double\",\"doc\":\"Time (seconds since the UNIX Epoch) of the time window end.\"}]}");
public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
private static final SpecificData MODEL$ = new SpecificData();
private static final BinaryMessageEncoder<AggregateKey> ENCODER =
new BinaryMessageEncoder<>(MODEL$, SCHEMA$);
private static final BinaryMessageDecoder<AggregateKey> DECODER =
new BinaryMessageDecoder<>(MODEL$, SCHEMA$);
/**
* Return the BinaryMessageEncoder instance used by this class.
* @return the message encoder used by this class
*/
public static BinaryMessageEncoder<AggregateKey> getEncoder() {
return ENCODER;
}
/**
* Return the BinaryMessageDecoder instance used by this class.
* @return the message decoder used by this class
*/
public static BinaryMessageDecoder<AggregateKey> getDecoder() {
return DECODER;
}
/**
* Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}.
* @param resolver a {@link SchemaStore} used to find schemas by fingerprint
* @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore
*/
public static BinaryMessageDecoder<AggregateKey> createDecoder(SchemaStore resolver) {
return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver);
}
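/* Illustrative sketch (editor's note, not generated by Avro): using a SchemaStore so the
 * decoder can resolve messages written with other schema versions by their fingerprint.
 * SchemaStore.Cache is Avro's in-memory SchemaStore implementation; 'encoded' is assumed
 * to be a ByteBuffer produced by a compatible writer.
 *
 *   SchemaStore.Cache store = new SchemaStore.Cache();
 *   store.addSchema(AggregateKey.getClassSchema());      // register known writer schemas
 *   BinaryMessageDecoder<AggregateKey> decoder = AggregateKey.createDecoder(store);
 *   AggregateKey key = decoder.decode(encoded);
 */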
/**
* Serializes this AggregateKey to a ByteBuffer.
* @return a buffer holding the serialized data for this instance
* @throws java.io.IOException if this instance could not be serialized
*/
public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
return ENCODER.encode(this);
}
/**
* Deserializes an AggregateKey from a ByteBuffer.
* @param b a byte buffer holding serialized data for an instance of this class
* @return an AggregateKey instance decoded from the given buffer
* @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class
*/
public static AggregateKey fromByteBuffer(
java.nio.ByteBuffer b) throws java.io.IOException {
return DECODER.decode(b);
}
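/* Illustrative usage sketch (editor's note, not generated by Avro): round-tripping a record
 * through the single-object binary encoding exposed by toByteBuffer()/fromByteBuffer().
 * The field values below are made up for the example.
 *
 *   AggregateKey key = AggregateKey.newBuilder()
 *       .setUserId("user-1")
 *       .setSourceId("source-1")
 *       .setTimeStart(1500000000.0)
 *       .setTimeEnd(1500000300.0)
 *       .build();                                         // projectId keeps its default (null)
 *   java.nio.ByteBuffer buf = key.toByteBuffer();         // serialize
 *   AggregateKey copy = AggregateKey.fromByteBuffer(buf); // deserialize
 */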
/** Project that the key belongs to. */
private java.lang.String projectId;
/** User Identifier created during the enrolment. */
private java.lang.String userId;
/** Unique identifier associated with the source. */
private java.lang.String sourceId;
/** Time (seconds since the UNIX Epoch) of the time window start. */
private double timeStart;
/** Time (seconds since the UNIX Epoch) of the time window end. */
private double timeEnd;
/**
* Default constructor. Note that this does not initialize fields
* to their default values from the schema. If that is desired then
* one should use newBuilder().
*/
public AggregateKey() {}
/**
* All-args constructor.
* @param projectId Project that the key belongs to.
* @param userId User Identifier created during the enrolment.
* @param sourceId Unique identifier associated with the source.
* @param timeStart Time (seconds since the UNIX Epoch) of the time window start.
* @param timeEnd Time (seconds since the UNIX Epoch) of the time window end.
*/
public AggregateKey(java.lang.String projectId, java.lang.String userId, java.lang.String sourceId, java.lang.Double timeStart, java.lang.Double timeEnd) {
this.projectId = projectId;
this.userId = userId;
this.sourceId = sourceId;
this.timeStart = timeStart;
this.timeEnd = timeEnd;
}
@Override
public org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; }
@Override
public org.apache.avro.Schema getSchema() { return SCHEMA$; }
// Used by DatumWriter. Applications should not call.
@Override
public java.lang.Object get(int field$) {
switch (field$) {
case 0: return projectId;
case 1: return userId;
case 2: return sourceId;
case 3: return timeStart;
case 4: return timeEnd;
default: throw new IndexOutOfBoundsException("Invalid index: " + field$);
}
}
// Used by DatumReader. Applications should not call.
@Override
@SuppressWarnings(value="unchecked")
public void put(int field$, java.lang.Object value$) {
switch (field$) {
case 0: projectId = value$ != null ? value$.toString() : null; break;
case 1: userId = value$ != null ? value$.toString() : null; break;
case 2: sourceId = value$ != null ? value$.toString() : null; break;
case 3: timeStart = (java.lang.Double)value$; break;
case 4: timeEnd = (java.lang.Double)value$; break;
default: throw new IndexOutOfBoundsException("Invalid index: " + field$);
}
}
/**
* Gets the value of the 'projectId' field.
* @return Project that the key belongs to.
*/
public java.lang.String getProjectId() {
return projectId;
}
/**
* Sets the value of the 'projectId' field.
* Project that the key belongs to.
* @param value the value to set.
*/
public void setProjectId(java.lang.String value) {
this.projectId = value;
}
/**
* Gets the value of the 'userId' field.
* @return User Identifier created during the enrolment.
*/
public java.lang.String getUserId() {
return userId;
}
/**
* Sets the value of the 'userId' field.
* User Identifier created during the enrolment.
* @param value the value to set.
*/
public void setUserId(java.lang.String value) {
this.userId = value;
}
/**
* Gets the value of the 'sourceId' field.
* @return Unique identifier associated with the source.
*/
public java.lang.String getSourceId() {
return sourceId;
}
/**
* Sets the value of the 'sourceId' field.
* Unique identifier associated with the source.
* @param value the value to set.
*/
public void setSourceId(java.lang.String value) {
this.sourceId = value;
}
/**
* Gets the value of the 'timeStart' field.
* @return Time (seconds since the UNIX Epoch) of the time window start.
*/
public double getTimeStart() {
return timeStart;
}
/**
* Sets the value of the 'timeStart' field.
* Time (seconds since the UNIX Epoch) of the time window start.
* @param value the value to set.
*/
public void setTimeStart(double value) {
this.timeStart = value;
}
/**
* Gets the value of the 'timeEnd' field.
* @return Time (seconds since the UNIX Epoch) of the time window end.
*/
public double getTimeEnd() {
return timeEnd;
}
/**
* Sets the value of the 'timeEnd' field.
* Time (seconds since the UNIX Epoch) of the time window end.
* @param value the value to set.
*/
public void setTimeEnd(double value) {
this.timeEnd = value;
}
/**
* Creates a new AggregateKey RecordBuilder.
* @return A new AggregateKey RecordBuilder
*/
public static org.radarcns.kafka.AggregateKey.Builder newBuilder() {
return new org.radarcns.kafka.AggregateKey.Builder();
}
/**
* Creates a new AggregateKey RecordBuilder by copying an existing Builder.
* @param other The existing builder to copy.
* @return A new AggregateKey RecordBuilder
*/
public static org.radarcns.kafka.AggregateKey.Builder newBuilder(org.radarcns.kafka.AggregateKey.Builder other) {
if (other == null) {
return new org.radarcns.kafka.AggregateKey.Builder();
} else {
return new org.radarcns.kafka.AggregateKey.Builder(other);
}
}
/**
* Creates a new AggregateKey RecordBuilder by copying an existing AggregateKey instance.
* @param other The existing instance to copy.
* @return A new AggregateKey RecordBuilder
*/
public static org.radarcns.kafka.AggregateKey.Builder newBuilder(org.radarcns.kafka.AggregateKey other) {
if (other == null) {
return new org.radarcns.kafka.AggregateKey.Builder();
} else {
return new org.radarcns.kafka.AggregateKey.Builder(other);
}
}
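/* Illustrative sketch (editor's note, not generated by Avro): reusing a partially configured
 * Builder via newBuilder(Builder) to stamp out keys for consecutive time windows. Identifiers
 * and window bounds are made up for the example.
 *
 *   AggregateKey.Builder template = AggregateKey.newBuilder()
 *       .setProjectId("radar-test")
 *       .setUserId("user-1")
 *       .setSourceId("source-1");
 *   AggregateKey firstWindow = AggregateKey.newBuilder(template)
 *       .setTimeStart(0.0).setTimeEnd(300.0).build();
 *   AggregateKey secondWindow = AggregateKey.newBuilder(template)
 *       .setTimeStart(300.0).setTimeEnd(600.0).build();
 */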
/**
* RecordBuilder for AggregateKey instances.
*/
@org.apache.avro.specific.AvroGenerated
public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<AggregateKey.Builder>
implements org.apache.avro.data.RecordBuilder<AggregateKey> {
/** Project that the key belongs to. */
private java.lang.String projectId;
/** User Identifier created during the enrolment. */
private java.lang.String userId;
/** Unique identifier associated with the source. */
private java.lang.String sourceId;
/** Time (seconds since the UNIX Epoch) of the time window start. */
private double timeStart;
/** Time (seconds since the UNIX Epoch) of the time window end. */
private double timeEnd;
/** Creates a new Builder */
private Builder() {
super(SCHEMA$, MODEL$);
}
/**
* Creates a Builder by copying an existing Builder.
* @param other The existing Builder to copy.
*/
private Builder(org.radarcns.kafka.AggregateKey.Builder other) {
super(other);
if (isValidValue(fields()[0], other.projectId)) {
this.projectId = data().deepCopy(fields()[0].schema(), other.projectId);
fieldSetFlags()[0] = other.fieldSetFlags()[0];
}
if (isValidValue(fields()[1], other.userId)) {
this.userId = data().deepCopy(fields()[1].schema(), other.userId);
fieldSetFlags()[1] = other.fieldSetFlags()[1];
}
if (isValidValue(fields()[2], other.sourceId)) {
this.sourceId = data().deepCopy(fields()[2].schema(), other.sourceId);
fieldSetFlags()[2] = other.fieldSetFlags()[2];
}
if (isValidValue(fields()[3], other.timeStart)) {
this.timeStart = data().deepCopy(fields()[3].schema(), other.timeStart);
fieldSetFlags()[3] = other.fieldSetFlags()[3];
}
if (isValidValue(fields()[4], other.timeEnd)) {
this.timeEnd = data().deepCopy(fields()[4].schema(), other.timeEnd);
fieldSetFlags()[4] = other.fieldSetFlags()[4];
}
}
/**
* Creates a Builder by copying an existing AggregateKey instance.
* @param other The existing instance to copy.
*/
private Builder(org.radarcns.kafka.AggregateKey other) {
super(SCHEMA$, MODEL$);
if (isValidValue(fields()[0], other.projectId)) {
this.projectId = data().deepCopy(fields()[0].schema(), other.projectId);
fieldSetFlags()[0] = true;
}
if (isValidValue(fields()[1], other.userId)) {
this.userId = data().deepCopy(fields()[1].schema(), other.userId);
fieldSetFlags()[1] = true;
}
if (isValidValue(fields()[2], other.sourceId)) {
this.sourceId = data().deepCopy(fields()[2].schema(), other.sourceId);
fieldSetFlags()[2] = true;
}
if (isValidValue(fields()[3], other.timeStart)) {
this.timeStart = data().deepCopy(fields()[3].schema(), other.timeStart);
fieldSetFlags()[3] = true;
}
if (isValidValue(fields()[4], other.timeEnd)) {
this.timeEnd = data().deepCopy(fields()[4].schema(), other.timeEnd);
fieldSetFlags()[4] = true;
}
}
/**
* Gets the value of the 'projectId' field.
* Project that the key belongs to.
* @return The value.
*/
public java.lang.String getProjectId() {
return projectId;
}
/**
* Sets the value of the 'projectId' field.
* Project that the key belongs to.
* @param value The value of 'projectId'.
* @return This builder.
*/
public org.radarcns.kafka.AggregateKey.Builder setProjectId(java.lang.String value) {
validate(fields()[0], value);
this.projectId = value;
fieldSetFlags()[0] = true;
return this;
}
/**
* Checks whether the 'projectId' field has been set.
* Project that the key belongs to.
* @return True if the 'projectId' field has been set, false otherwise.
*/
public boolean hasProjectId() {
return fieldSetFlags()[0];
}
/**
* Clears the value of the 'projectId' field.
* Project that the key belongs to.
* @return This builder.
*/
public org.radarcns.kafka.AggregateKey.Builder clearProjectId() {
projectId = null;
fieldSetFlags()[0] = false;
return this;
}
/**
* Gets the value of the 'userId' field.
* User Identifier created during the enrolment.
* @return The value.
*/
public java.lang.String getUserId() {
return userId;
}
/**
* Sets the value of the 'userId' field.
* User Identifier created during the enrolment.
* @param value The value of 'userId'.
* @return This builder.
*/
public org.radarcns.kafka.AggregateKey.Builder setUserId(java.lang.String value) {
validate(fields()[1], value);
this.userId = value;
fieldSetFlags()[1] = true;
return this;
}
/**
* Checks whether the 'userId' field has been set.
* User Identifier created during the enrolment.
* @return True if the 'userId' field has been set, false otherwise.
*/
public boolean hasUserId() {
return fieldSetFlags()[1];
}
/**
* Clears the value of the 'userId' field.
* User Identifier created during the enrolment.
* @return This builder.
*/
public org.radarcns.kafka.AggregateKey.Builder clearUserId() {
userId = null;
fieldSetFlags()[1] = false;
return this;
}
/**
* Gets the value of the 'sourceId' field.
* Unique identifier associated with the source.
* @return The value.
*/
public java.lang.String getSourceId() {
return sourceId;
}
/**
* Sets the value of the 'sourceId' field.
* Unique identifier associated with the source.
* @param value The value of 'sourceId'.
* @return This builder.
*/
public org.radarcns.kafka.AggregateKey.Builder setSourceId(java.lang.String value) {
validate(fields()[2], value);
this.sourceId = value;
fieldSetFlags()[2] = true;
return this;
}
/**
* Checks whether the 'sourceId' field has been set.
* Unique identifier associated with the source.
* @return True if the 'sourceId' field has been set, false otherwise.
*/
public boolean hasSourceId() {
return fieldSetFlags()[2];
}
/**
* Clears the value of the 'sourceId' field.
* Unique identifier associated with the source.
* @return This builder.
*/
public org.radarcns.kafka.AggregateKey.Builder clearSourceId() {
sourceId = null;
fieldSetFlags()[2] = false;
return this;
}
/**
* Gets the value of the 'timeStart' field.
* Time (seconds since the UNIX Epoch) of the time window start.
* @return The value.
*/
public double getTimeStart() {
return timeStart;
}
/**
* Sets the value of the 'timeStart' field.
* Time (seconds since the UNIX Epoch) of the time window start.
* @param value The value of 'timeStart'.
* @return This builder.
*/
public org.radarcns.kafka.AggregateKey.Builder setTimeStart(double value) {
validate(fields()[3], value);
this.timeStart = value;
fieldSetFlags()[3] = true;
return this;
}
/**
* Checks whether the 'timeStart' field has been set.
* Time (seconds since the UNIX Epoch) of the time window start.
* @return True if the 'timeStart' field has been set, false otherwise.
*/
public boolean hasTimeStart() {
return fieldSetFlags()[3];
}
/**
* Clears the value of the 'timeStart' field.
* Time (seconds since the UNIX Epoch) of the time window start.
* @return This builder.
*/
public org.radarcns.kafka.AggregateKey.Builder clearTimeStart() {
fieldSetFlags()[3] = false;
return this;
}
/**
* Gets the value of the 'timeEnd' field.
* Time (seconds since the UNIX Epoch) of the time window end.
* @return The value.
*/
public double getTimeEnd() {
return timeEnd;
}
/**
* Sets the value of the 'timeEnd' field.
* Time (seconds since the UNIX Epoch) of the time window end.
* @param value The value of 'timeEnd'.
* @return This builder.
*/
public org.radarcns.kafka.AggregateKey.Builder setTimeEnd(double value) {
validate(fields()[4], value);
this.timeEnd = value;
fieldSetFlags()[4] = true;
return this;
}
/**
* Checks whether the 'timeEnd' field has been set.
* Time (seconds since the UNIX Epoch) of the time window end.
* @return True if the 'timeEnd' field has been set, false otherwise.
*/
public boolean hasTimeEnd() {
return fieldSetFlags()[4];
}
/**
* Clears the value of the 'timeEnd' field.
* Time (seconds since the UNIX Epoch) of the time window end.
* @return This builder.
*/
public org.radarcns.kafka.AggregateKey.Builder clearTimeEnd() {
fieldSetFlags()[4] = false;
return this;
}
@Override
@SuppressWarnings("unchecked")
public AggregateKey build() {
try {
AggregateKey record = new AggregateKey();
record.projectId = fieldSetFlags()[0] ? this.projectId : (java.lang.String) defaultValue(fields()[0]);
record.userId = fieldSetFlags()[1] ? this.userId : (java.lang.String) defaultValue(fields()[1]);
record.sourceId = fieldSetFlags()[2] ? this.sourceId : (java.lang.String) defaultValue(fields()[2]);
record.timeStart = fieldSetFlags()[3] ? this.timeStart : (java.lang.Double) defaultValue(fields()[3]);
record.timeEnd = fieldSetFlags()[4] ? this.timeEnd : (java.lang.Double) defaultValue(fields()[4]);
return record;
} catch (org.apache.avro.AvroMissingFieldException e) {
throw e;
} catch (java.lang.Exception e) {
throw new org.apache.avro.AvroRuntimeException(e);
}
}
}
@SuppressWarnings("unchecked")
private static final org.apache.avro.io.DatumWriter<AggregateKey>
WRITER$ = (org.apache.avro.io.DatumWriter<AggregateKey>)MODEL$.createDatumWriter(SCHEMA$);
@Override public void writeExternal(java.io.ObjectOutput out)
throws java.io.IOException {
WRITER$.write(this, SpecificData.getEncoder(out));
}
@SuppressWarnings("unchecked")
private static final org.apache.avro.io.DatumReader<AggregateKey>
READER$ = (org.apache.avro.io.DatumReader<AggregateKey>)MODEL$.createDatumReader(SCHEMA$);
@Override public void readExternal(java.io.ObjectInput in)
throws java.io.IOException {
READER$.read(this, SpecificData.getDecoder(in));
}
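/* Illustrative sketch (editor's note, not generated by Avro): because the generated class is
 * java.io.Externalizable, plain Java object streams delegate to the Avro writer/reader above.
 * 'key' is assumed to be a fully populated AggregateKey.
 *
 *   java.io.ByteArrayOutputStream bos = new java.io.ByteArrayOutputStream();
 *   try (java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(bos)) {
 *     oos.writeObject(key);                               // invokes writeExternal(...)
 *   }
 *   try (java.io.ObjectInputStream ois =
 *       new java.io.ObjectInputStream(new java.io.ByteArrayInputStream(bos.toByteArray()))) {
 *     AggregateKey copy = (AggregateKey) ois.readObject(); // invokes readExternal(...)
 *   }
 */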
@Override protected boolean hasCustomCoders() { return true; }
@Override public void customEncode(org.apache.avro.io.Encoder out)
throws java.io.IOException
{
if (this.projectId == null) {
out.writeIndex(0);
out.writeNull();
} else {
out.writeIndex(1);
out.writeString(this.projectId);
}
out.writeString(this.userId);
out.writeString(this.sourceId);
out.writeDouble(this.timeStart);
out.writeDouble(this.timeEnd);
}
@Override public void customDecode(org.apache.avro.io.ResolvingDecoder in)
throws java.io.IOException
{
org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff();
if (fieldOrder == null) {
if (in.readIndex() != 1) {
in.readNull();
this.projectId = null;
} else {
this.projectId = in.readString();
}
this.userId = in.readString();
this.sourceId = in.readString();
this.timeStart = in.readDouble();
this.timeEnd = in.readDouble();
} else {
for (int i = 0; i < 5; i++) {
switch (fieldOrder[i].pos()) {
case 0:
if (in.readIndex() != 1) {
in.readNull();
this.projectId = null;
} else {
this.projectId = in.readString();
}
break;
case 1:
this.userId = in.readString();
break;
case 2:
this.sourceId = in.readString();
break;
case 3:
this.timeStart = in.readDouble();
break;
case 4:
this.timeEnd = in.readDouble();
break;
default:
throw new java.io.IOException("Corrupt ResolvingDecoder.");
}
}
}
}
}