/**
* Autogenerated by Avro
*
* DO NOT EDIT DIRECTLY
*/
package org.apache.hudi.avro.model;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.specific.SpecificData;
import org.apache.avro.util.Utf8;
import org.apache.avro.message.BinaryMessageEncoder;
import org.apache.avro.message.BinaryMessageDecoder;
import org.apache.avro.message.SchemaStore;
@org.apache.avro.specific.AvroGenerated
public class HoodieCommitMetadata extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
private static final long serialVersionUID = 7951178712045514079L;
public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"HoodieCommitMetadata\",\"namespace\":\"org.apache.hudi.avro.model\",\"fields\":[{\"name\":\"partitionToWriteStats\",\"type\":[\"null\",{\"type\":\"map\",\"values\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"HoodieWriteStat\",\"fields\":[{\"name\":\"fileId\",\"type\":[\"null\",{\"type\":\"string\",\"avro.java.string\":\"String\"}],\"default\":null},{\"name\":\"path\",\"type\":[\"null\",{\"type\":\"string\",\"avro.java.string\":\"String\"}],\"default\":null},{\"name\":\"prevCommit\",\"type\":[\"null\",{\"type\":\"string\",\"avro.java.string\":\"String\"}],\"default\":null},{\"name\":\"numWrites\",\"type\":[\"null\",\"long\"],\"default\":null},{\"name\":\"numDeletes\",\"type\":[\"null\",\"long\"],\"default\":null},{\"name\":\"numUpdateWrites\",\"type\":[\"null\",\"long\"],\"default\":null},{\"name\":\"totalWriteBytes\",\"type\":[\"null\",\"long\"],\"default\":null},{\"name\":\"totalWriteErrors\",\"type\":[\"null\",\"long\"],\"default\":null},{\"name\":\"partitionPath\",\"type\":[\"null\",{\"type\":\"string\",\"avro.java.string\":\"String\"}],\"default\":null},{\"name\":\"totalLogRecords\",\"type\":[\"null\",\"long\"],\"default\":null},{\"name\":\"totalLogFiles\",\"type\":[\"null\",\"long\"],\"default\":null},{\"name\":\"totalUpdatedRecordsCompacted\",\"type\":[\"null\",\"long\"],\"default\":null},{\"name\":\"numInserts\",\"type\":[\"null\",\"long\"],\"default\":null},{\"name\":\"totalLogBlocks\",\"type\":[\"null\",\"long\"],\"default\":null},{\"name\":\"totalCorruptLogBlock\",\"type\":[\"null\",\"long\"],\"default\":null},{\"name\":\"totalRollbackBlocks\",\"type\":[\"null\",\"long\"],\"default\":null},{\"name\":\"fileSizeInBytes\",\"type\":[\"null\",\"long\"],\"default\":null},{\"name\":\"logVersion\",\"type\":[\"null\",\"int\"],\"default\":null},{\"name\":\"logOffset\",\"type\":[\"null\",\"long\"],\"default\":null},{\"name\":\"baseFile\",\"type\":[\"null\",{\"type\":\"string\",\"avro.java.string\":\"String\"}],\"default\":null},{\"name\":\"logFiles\",\"type\":[\"null\",{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"},\"default\":null}],\"default\":null},{\"name\":\"cdcStats\",\"type\":[\"null\",{\"type\":\"map\",\"values\":\"long\",\"avro.java.string\":\"String\",\"default\":null}],\"default\":null}]}},\"avro.java.string\":\"String\"}],\"default\":null},{\"name\":\"extraMetadata\",\"type\":[\"null\",{\"type\":\"map\",\"values\":{\"type\":\"string\",\"avro.java.string\":\"String\"},\"avro.java.string\":\"String\",\"default\":null}],\"default\":null},{\"name\":\"version\",\"type\":[\"int\",\"null\"],\"default\":1},{\"name\":\"operationType\",\"type\":[\"null\",{\"type\":\"string\",\"avro.java.string\":\"String\"}],\"default\":null}]}");
public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
private static SpecificData MODEL$ = new SpecificData();
private static final BinaryMessageEncoder<HoodieCommitMetadata> ENCODER =
new BinaryMessageEncoder<HoodieCommitMetadata>(MODEL$, SCHEMA$);
private static final BinaryMessageDecoder<HoodieCommitMetadata> DECODER =
new BinaryMessageDecoder<HoodieCommitMetadata>(MODEL$, SCHEMA$);
/**
* Return the BinaryMessageEncoder instance used by this class.
* @return the message encoder used by this class
*/
public static BinaryMessageEncoder<HoodieCommitMetadata> getEncoder() {
return ENCODER;
}
/**
* Return the BinaryMessageDecoder instance used by this class.
* @return the message decoder used by this class
*/
public static BinaryMessageDecoder<HoodieCommitMetadata> getDecoder() {
return DECODER;
}
/**
* Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}.
* @param resolver a {@link SchemaStore} used to find schemas by fingerprint
* @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore
*/
public static BinaryMessageDecoder<HoodieCommitMetadata> createDecoder(SchemaStore resolver) {
return new BinaryMessageDecoder<HoodieCommitMetadata>(MODEL$, SCHEMA$, resolver);
}
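/*
 * Illustrative sketch (not part of the generated class): a decoder backed by a
 * SchemaStore can resolve payloads written with older fingerprinted schemas.
 * The variable names and the olderWriterSchema/olderPayloadBytes values below
 * are hypothetical.
 *
 *   SchemaStore.Cache store = new SchemaStore.Cache();
 *   store.addSchema(olderWriterSchema); // register an earlier writer schema
 *   BinaryMessageDecoder<HoodieCommitMetadata> decoder =
 *       HoodieCommitMetadata.createDecoder(store);
 *   HoodieCommitMetadata metadata = decoder.decode(olderPayloadBytes);
 */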
/**
* Serializes this HoodieCommitMetadata to a ByteBuffer.
* @return a buffer holding the serialized data for this instance
* @throws java.io.IOException if this instance could not be serialized
*/
public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
return ENCODER.encode(this);
}
/**
* Deserializes a HoodieCommitMetadata from a ByteBuffer.
* @param b a byte buffer holding serialized data for an instance of this class
* @return a HoodieCommitMetadata instance decoded from the given buffer
* @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class
*/
public static HoodieCommitMetadata fromByteBuffer(
java.nio.ByteBuffer b) throws java.io.IOException {
return DECODER.decode(b);
}
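/*
 * Round-trip sketch (illustrative only; the field values are hypothetical, and
 * both calls below throw java.io.IOException):
 *
 *   HoodieCommitMetadata metadata = HoodieCommitMetadata.newBuilder()
 *       .setVersion(1)
 *       .setOperationType("upsert")
 *       .build();
 *   java.nio.ByteBuffer buf = metadata.toByteBuffer();
 *   HoodieCommitMetadata decoded = HoodieCommitMetadata.fromByteBuffer(buf);
 */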
private java.util.Map<java.lang.String,java.util.List<org.apache.hudi.avro.model.HoodieWriteStat>> partitionToWriteStats;
private java.util.Map<java.lang.String,java.lang.String> extraMetadata;
private java.lang.Integer version;
private java.lang.String operationType;
/**
* Default constructor. Note that this does not initialize fields
* to their default values from the schema. If that is desired then
* one should use newBuilder().
*/
public HoodieCommitMetadata() {}
/**
* All-args constructor.
* @param partitionToWriteStats The new value for partitionToWriteStats
* @param extraMetadata The new value for extraMetadata
* @param version The new value for version
* @param operationType The new value for operationType
*/
public HoodieCommitMetadata(java.util.Map<java.lang.String,java.util.List<org.apache.hudi.avro.model.HoodieWriteStat>> partitionToWriteStats, java.util.Map<java.lang.String,java.lang.String> extraMetadata, java.lang.Integer version, java.lang.String operationType) {
this.partitionToWriteStats = partitionToWriteStats;
this.extraMetadata = extraMetadata;
this.version = version;
this.operationType = operationType;
}
public org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; }
public org.apache.avro.Schema getSchema() { return SCHEMA$; }
// Used by DatumWriter. Applications should not call.
public java.lang.Object get(int field$) {
switch (field$) {
case 0: return partitionToWriteStats;
case 1: return extraMetadata;
case 2: return version;
case 3: return operationType;
default: throw new IndexOutOfBoundsException("Invalid index: " + field$);
}
}
// Used by DatumReader. Applications should not call.
@SuppressWarnings(value="unchecked")
public void put(int field$, java.lang.Object value$) {
switch (field$) {
case 0: partitionToWriteStats = (java.util.Map<java.lang.String,java.util.List<org.apache.hudi.avro.model.HoodieWriteStat>>)value$; break;
case 1: extraMetadata = (java.util.Map<java.lang.String,java.lang.String>)value$; break;
case 2: version = (java.lang.Integer)value$; break;
case 3: operationType = value$ != null ? value$.toString() : null; break;
default: throw new IndexOutOfBoundsException("Invalid index: " + field$);
}
}
/**
* Gets the value of the 'partitionToWriteStats' field.
* @return The value of the 'partitionToWriteStats' field.
*/
public java.util.Map<java.lang.String,java.util.List<org.apache.hudi.avro.model.HoodieWriteStat>> getPartitionToWriteStats() {
return partitionToWriteStats;
}
/**
* Sets the value of the 'partitionToWriteStats' field.
* @param value the value to set.
*/
public void setPartitionToWriteStats(java.util.Map<java.lang.String,java.util.List<org.apache.hudi.avro.model.HoodieWriteStat>> value) {
this.partitionToWriteStats = value;
}
/**
* Gets the value of the 'extraMetadata' field.
* @return The value of the 'extraMetadata' field.
*/
public java.util.Map<java.lang.String,java.lang.String> getExtraMetadata() {
return extraMetadata;
}
/**
* Sets the value of the 'extraMetadata' field.
* @param value the value to set.
*/
public void setExtraMetadata(java.util.Map<java.lang.String,java.lang.String> value) {
this.extraMetadata = value;
}
/**
* Gets the value of the 'version' field.
* @return The value of the 'version' field.
*/
public java.lang.Integer getVersion() {
return version;
}
/**
* Sets the value of the 'version' field.
* @param value the value to set.
*/
public void setVersion(java.lang.Integer value) {
this.version = value;
}
/**
* Gets the value of the 'operationType' field.
* @return The value of the 'operationType' field.
*/
public java.lang.String getOperationType() {
return operationType;
}
/**
* Sets the value of the 'operationType' field.
* @param value the value to set.
*/
public void setOperationType(java.lang.String value) {
this.operationType = value;
}
/**
* Creates a new HoodieCommitMetadata RecordBuilder.
* @return A new HoodieCommitMetadata RecordBuilder
*/
public static org.apache.hudi.avro.model.HoodieCommitMetadata.Builder newBuilder() {
return new org.apache.hudi.avro.model.HoodieCommitMetadata.Builder();
}
/**
* Creates a new HoodieCommitMetadata RecordBuilder by copying an existing Builder.
* @param other The existing builder to copy.
* @return A new HoodieCommitMetadata RecordBuilder
*/
public static org.apache.hudi.avro.model.HoodieCommitMetadata.Builder newBuilder(org.apache.hudi.avro.model.HoodieCommitMetadata.Builder other) {
if (other == null) {
return new org.apache.hudi.avro.model.HoodieCommitMetadata.Builder();
} else {
return new org.apache.hudi.avro.model.HoodieCommitMetadata.Builder(other);
}
}
/**
* Creates a new HoodieCommitMetadata RecordBuilder by copying an existing HoodieCommitMetadata instance.
* @param other The existing instance to copy.
* @return A new HoodieCommitMetadata RecordBuilder
*/
public static org.apache.hudi.avro.model.HoodieCommitMetadata.Builder newBuilder(org.apache.hudi.avro.model.HoodieCommitMetadata other) {
if (other == null) {
return new org.apache.hudi.avro.model.HoodieCommitMetadata.Builder();
} else {
return new org.apache.hudi.avro.model.HoodieCommitMetadata.Builder(other);
}
}
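/*
 * Builder copy sketch (illustrative; 'existing' and the overridden value are
 * hypothetical): copying an instance and overriding one field leaves the
 * remaining fields as deep copies of the original.
 *
 *   HoodieCommitMetadata updated = HoodieCommitMetadata.newBuilder(existing)
 *       .setOperationType("compaction")
 *       .build();
 */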
/**
* RecordBuilder for HoodieCommitMetadata instances.
*/
@org.apache.avro.specific.AvroGenerated
public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<HoodieCommitMetadata>
implements org.apache.avro.data.RecordBuilder<HoodieCommitMetadata> {
private java.util.Map<java.lang.String,java.util.List<org.apache.hudi.avro.model.HoodieWriteStat>> partitionToWriteStats;
private java.util.Map<java.lang.String,java.lang.String> extraMetadata;
private java.lang.Integer version;
private java.lang.String operationType;
/** Creates a new Builder */
private Builder() {
super(SCHEMA$);
}
/**
* Creates a Builder by copying an existing Builder.
* @param other The existing Builder to copy.
*/
private Builder(org.apache.hudi.avro.model.HoodieCommitMetadata.Builder other) {
super(other);
if (isValidValue(fields()[0], other.partitionToWriteStats)) {
this.partitionToWriteStats = data().deepCopy(fields()[0].schema(), other.partitionToWriteStats);
fieldSetFlags()[0] = other.fieldSetFlags()[0];
}
if (isValidValue(fields()[1], other.extraMetadata)) {
this.extraMetadata = data().deepCopy(fields()[1].schema(), other.extraMetadata);
fieldSetFlags()[1] = other.fieldSetFlags()[1];
}
if (isValidValue(fields()[2], other.version)) {
this.version = data().deepCopy(fields()[2].schema(), other.version);
fieldSetFlags()[2] = other.fieldSetFlags()[2];
}
if (isValidValue(fields()[3], other.operationType)) {
this.operationType = data().deepCopy(fields()[3].schema(), other.operationType);
fieldSetFlags()[3] = other.fieldSetFlags()[3];
}
}
/**
* Creates a Builder by copying an existing HoodieCommitMetadata instance
* @param other The existing instance to copy.
*/
private Builder(org.apache.hudi.avro.model.HoodieCommitMetadata other) {
super(SCHEMA$);
if (isValidValue(fields()[0], other.partitionToWriteStats)) {
this.partitionToWriteStats = data().deepCopy(fields()[0].schema(), other.partitionToWriteStats);
fieldSetFlags()[0] = true;
}
if (isValidValue(fields()[1], other.extraMetadata)) {
this.extraMetadata = data().deepCopy(fields()[1].schema(), other.extraMetadata);
fieldSetFlags()[1] = true;
}
if (isValidValue(fields()[2], other.version)) {
this.version = data().deepCopy(fields()[2].schema(), other.version);
fieldSetFlags()[2] = true;
}
if (isValidValue(fields()[3], other.operationType)) {
this.operationType = data().deepCopy(fields()[3].schema(), other.operationType);
fieldSetFlags()[3] = true;
}
}
/**
* Gets the value of the 'partitionToWriteStats' field.
* @return The value.
*/
public java.util.Map<java.lang.String,java.util.List<org.apache.hudi.avro.model.HoodieWriteStat>> getPartitionToWriteStats() {
return partitionToWriteStats;
}
/**
* Sets the value of the 'partitionToWriteStats' field.
* @param value The value of 'partitionToWriteStats'.
* @return This builder.
*/
public org.apache.hudi.avro.model.HoodieCommitMetadata.Builder setPartitionToWriteStats(java.util.Map<java.lang.String,java.util.List<org.apache.hudi.avro.model.HoodieWriteStat>> value) {
validate(fields()[0], value);
this.partitionToWriteStats = value;
fieldSetFlags()[0] = true;
return this;
}
/**
* Checks whether the 'partitionToWriteStats' field has been set.
* @return True if the 'partitionToWriteStats' field has been set, false otherwise.
*/
public boolean hasPartitionToWriteStats() {
return fieldSetFlags()[0];
}
/**
* Clears the value of the 'partitionToWriteStats' field.
* @return This builder.
*/
public org.apache.hudi.avro.model.HoodieCommitMetadata.Builder clearPartitionToWriteStats() {
partitionToWriteStats = null;
fieldSetFlags()[0] = false;
return this;
}
/**
* Gets the value of the 'extraMetadata' field.
* @return The value.
*/
public java.util.Map<java.lang.String,java.lang.String> getExtraMetadata() {
return extraMetadata;
}
/**
* Sets the value of the 'extraMetadata' field.
* @param value The value of 'extraMetadata'.
* @return This builder.
*/
public org.apache.hudi.avro.model.HoodieCommitMetadata.Builder setExtraMetadata(java.util.Map<java.lang.String,java.lang.String> value) {
validate(fields()[1], value);
this.extraMetadata = value;
fieldSetFlags()[1] = true;
return this;
}
/**
* Checks whether the 'extraMetadata' field has been set.
* @return True if the 'extraMetadata' field has been set, false otherwise.
*/
public boolean hasExtraMetadata() {
return fieldSetFlags()[1];
}
/**
* Clears the value of the 'extraMetadata' field.
* @return This builder.
*/
public org.apache.hudi.avro.model.HoodieCommitMetadata.Builder clearExtraMetadata() {
extraMetadata = null;
fieldSetFlags()[1] = false;
return this;
}
/**
* Gets the value of the 'version' field.
* @return The value.
*/
public java.lang.Integer getVersion() {
return version;
}
/**
* Sets the value of the 'version' field.
* @param value The value of 'version'.
* @return This builder.
*/
public org.apache.hudi.avro.model.HoodieCommitMetadata.Builder setVersion(java.lang.Integer value) {
validate(fields()[2], value);
this.version = value;
fieldSetFlags()[2] = true;
return this;
}
/**
* Checks whether the 'version' field has been set.
* @return True if the 'version' field has been set, false otherwise.
*/
public boolean hasVersion() {
return fieldSetFlags()[2];
}
/**
* Clears the value of the 'version' field.
* @return This builder.
*/
public org.apache.hudi.avro.model.HoodieCommitMetadata.Builder clearVersion() {
version = null;
fieldSetFlags()[2] = false;
return this;
}
/**
* Gets the value of the 'operationType' field.
* @return The value.
*/
public java.lang.String getOperationType() {
return operationType;
}
/**
* Sets the value of the 'operationType' field.
* @param value The value of 'operationType'.
* @return This builder.
*/
public org.apache.hudi.avro.model.HoodieCommitMetadata.Builder setOperationType(java.lang.String value) {
validate(fields()[3], value);
this.operationType = value;
fieldSetFlags()[3] = true;
return this;
}
/**
* Checks whether the 'operationType' field has been set.
* @return True if the 'operationType' field has been set, false otherwise.
*/
public boolean hasOperationType() {
return fieldSetFlags()[3];
}
/**
* Clears the value of the 'operationType' field.
* @return This builder.
*/
public org.apache.hudi.avro.model.HoodieCommitMetadata.Builder clearOperationType() {
operationType = null;
fieldSetFlags()[3] = false;
return this;
}
@Override
@SuppressWarnings("unchecked")
public HoodieCommitMetadata build() {
try {
HoodieCommitMetadata record = new HoodieCommitMetadata();
record.partitionToWriteStats = fieldSetFlags()[0] ? this.partitionToWriteStats : (java.util.Map<java.lang.String,java.util.List<org.apache.hudi.avro.model.HoodieWriteStat>>) defaultValue(fields()[0]);
record.extraMetadata = fieldSetFlags()[1] ? this.extraMetadata : (java.util.Map<java.lang.String,java.lang.String>) defaultValue(fields()[1]);
record.version = fieldSetFlags()[2] ? this.version : (java.lang.Integer) defaultValue(fields()[2]);
record.operationType = fieldSetFlags()[3] ? this.operationType : (java.lang.String) defaultValue(fields()[3]);
return record;
} catch (org.apache.avro.AvroMissingFieldException e) {
throw e;
} catch (java.lang.Exception e) {
throw new org.apache.avro.AvroRuntimeException(e);
}
}
}
@SuppressWarnings("unchecked")
private static final org.apache.avro.io.DatumWriter<HoodieCommitMetadata>
WRITER$ = (org.apache.avro.io.DatumWriter<HoodieCommitMetadata>)MODEL$.createDatumWriter(SCHEMA$);
@Override public void writeExternal(java.io.ObjectOutput out)
throws java.io.IOException {
WRITER$.write(this, SpecificData.getEncoder(out));
}
@SuppressWarnings("unchecked")
private static final org.apache.avro.io.DatumReader<HoodieCommitMetadata>
READER$ = (org.apache.avro.io.DatumReader<HoodieCommitMetadata>)MODEL$.createDatumReader(SCHEMA$);
@Override public void readExternal(java.io.ObjectInput in)
throws java.io.IOException {
READER$.read(this, SpecificData.getDecoder(in));
}
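/*
 * Java-serialization sketch (illustrative): since the record is Externalizable,
 * writeExternal/readExternal delegate to the Avro datum writer/reader above, so
 * standard object streams work. The 'metadata' instance below is hypothetical,
 * and the calls throw java.io.IOException (readObject also ClassNotFoundException).
 *
 *   java.io.ByteArrayOutputStream bos = new java.io.ByteArrayOutputStream();
 *   new java.io.ObjectOutputStream(bos).writeObject(metadata);
 *   HoodieCommitMetadata copy = (HoodieCommitMetadata) new java.io.ObjectInputStream(
 *       new java.io.ByteArrayInputStream(bos.toByteArray())).readObject();
 */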
@Override protected boolean hasCustomCoders() { return true; }
@Override public void customEncode(org.apache.avro.io.Encoder out)
throws java.io.IOException
{
if (this.partitionToWriteStats == null) {
out.writeIndex(0);
out.writeNull();
} else {
out.writeIndex(1);
long size0 = this.partitionToWriteStats.size();
out.writeMapStart();
out.setItemCount(size0);
long actualSize0 = 0;
for (java.util.Map.Entry<java.lang.String, java.util.List<org.apache.hudi.avro.model.HoodieWriteStat>> e0: this.partitionToWriteStats.entrySet()) {
actualSize0++;
out.startItem();
out.writeString(e0.getKey());
java.util.List<org.apache.hudi.avro.model.HoodieWriteStat> v0 = e0.getValue();
long size1 = v0.size();
out.writeArrayStart();
out.setItemCount(size1);
long actualSize1 = 0;
for (org.apache.hudi.avro.model.HoodieWriteStat e1: v0) {
actualSize1++;
out.startItem();
e1.customEncode(out);
}
out.writeArrayEnd();
if (actualSize1 != size1)
throw new java.util.ConcurrentModificationException("Array-size written was " + size1 + ", but element count was " + actualSize1 + ".");
}
out.writeMapEnd();
if (actualSize0 != size0)
throw new java.util.ConcurrentModificationException("Map-size written was " + size0 + ", but element count was " + actualSize0 + ".");
}
if (this.extraMetadata == null) {
out.writeIndex(0);
out.writeNull();
} else {
out.writeIndex(1);
long size2 = this.extraMetadata.size();
out.writeMapStart();
out.setItemCount(size2);
long actualSize2 = 0;
for (java.util.Map.Entry<java.lang.String, java.lang.String> e2: this.extraMetadata.entrySet()) {
actualSize2++;
out.startItem();
out.writeString(e2.getKey());
java.lang.String v2 = e2.getValue();
out.writeString(v2);
}
out.writeMapEnd();
if (actualSize2 != size2)
throw new java.util.ConcurrentModificationException("Map-size written was " + size2 + ", but element count was " + actualSize2 + ".");
}
if (this.version == null) {
out.writeIndex(1);
out.writeNull();
} else {
out.writeIndex(0);
out.writeInt(this.version);
}
if (this.operationType == null) {
out.writeIndex(0);
out.writeNull();
} else {
out.writeIndex(1);
out.writeString(this.operationType);
}
}
@Override public void customDecode(org.apache.avro.io.ResolvingDecoder in)
throws java.io.IOException
{
org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff();
if (fieldOrder == null) {
if (in.readIndex() != 1) {
in.readNull();
this.partitionToWriteStats = null;
} else {
long size0 = in.readMapStart();
java.util.Map<java.lang.String,java.util.List<org.apache.hudi.avro.model.HoodieWriteStat>> m0 = this.partitionToWriteStats; // Need fresh name due to limitation of macro system
if (m0 == null) {
m0 = new java.util.HashMap<java.lang.String,java.util.List<org.apache.hudi.avro.model.HoodieWriteStat>>((int)size0);
this.partitionToWriteStats = m0;
} else m0.clear();
for ( ; 0 < size0; size0 = in.mapNext()) {
for ( ; size0 != 0; size0--) {
java.lang.String k0 = null;
k0 = in.readString();
java.util.List<org.apache.hudi.avro.model.HoodieWriteStat> v0 = null;
long size1 = in.readArrayStart();
java.util.List<org.apache.hudi.avro.model.HoodieWriteStat> a1 = v0;
if (a1 == null) {
a1 = new SpecificData.Array<org.apache.hudi.avro.model.HoodieWriteStat>((int)size1, SCHEMA$.getField("partitionToWriteStats").schema().getTypes().get(1).getValueType());
v0 = a1;
} else a1.clear();
SpecificData.Array<org.apache.hudi.avro.model.HoodieWriteStat> ga1 = (a1 instanceof SpecificData.Array ? (SpecificData.Array<org.apache.hudi.avro.model.HoodieWriteStat>)a1 : null);
for ( ; 0 < size1; size1 = in.arrayNext()) {
for ( ; size1 != 0; size1--) {
org.apache.hudi.avro.model.HoodieWriteStat e1 = (ga1 != null ? ga1.peek() : null);
if (e1 == null) {
e1 = new org.apache.hudi.avro.model.HoodieWriteStat();
}
e1.customDecode(in);
a1.add(e1);
}
}
m0.put(k0, v0);
}
}
}
if (in.readIndex() != 1) {
in.readNull();
this.extraMetadata = null;
} else {
long size2 = in.readMapStart();
java.util.Map<java.lang.String,java.lang.String> m2 = this.extraMetadata; // Need fresh name due to limitation of macro system
if (m2 == null) {
m2 = new java.util.HashMap<java.lang.String,java.lang.String>((int)size2);
this.extraMetadata = m2;
} else m2.clear();
for ( ; 0 < size2; size2 = in.mapNext()) {
for ( ; size2 != 0; size2--) {
java.lang.String k2 = null;
k2 = in.readString();
java.lang.String v2 = null;
v2 = in.readString();
m2.put(k2, v2);
}
}
}
if (in.readIndex() != 0) {
in.readNull();
this.version = null;
} else {
this.version = in.readInt();
}
if (in.readIndex() != 1) {
in.readNull();
this.operationType = null;
} else {
this.operationType = in.readString();
}
} else {
for (int i = 0; i < 4; i++) {
switch (fieldOrder[i].pos()) {
case 0:
if (in.readIndex() != 1) {
in.readNull();
this.partitionToWriteStats = null;
} else {
long size0 = in.readMapStart();
java.util.Map<java.lang.String,java.util.List<org.apache.hudi.avro.model.HoodieWriteStat>> m0 = this.partitionToWriteStats; // Need fresh name due to limitation of macro system
if (m0 == null) {
m0 = new java.util.HashMap<java.lang.String,java.util.List<org.apache.hudi.avro.model.HoodieWriteStat>>((int)size0);
this.partitionToWriteStats = m0;
} else m0.clear();
for ( ; 0 < size0; size0 = in.mapNext()) {
for ( ; size0 != 0; size0--) {
java.lang.String k0 = null;
k0 = in.readString();
java.util.List<org.apache.hudi.avro.model.HoodieWriteStat> v0 = null;
long size1 = in.readArrayStart();
java.util.List<org.apache.hudi.avro.model.HoodieWriteStat> a1 = v0;
if (a1 == null) {
a1 = new SpecificData.Array<org.apache.hudi.avro.model.HoodieWriteStat>((int)size1, SCHEMA$.getField("partitionToWriteStats").schema().getTypes().get(1).getValueType());
v0 = a1;
} else a1.clear();
SpecificData.Array<org.apache.hudi.avro.model.HoodieWriteStat> ga1 = (a1 instanceof SpecificData.Array ? (SpecificData.Array<org.apache.hudi.avro.model.HoodieWriteStat>)a1 : null);
for ( ; 0 < size1; size1 = in.arrayNext()) {
for ( ; size1 != 0; size1--) {
org.apache.hudi.avro.model.HoodieWriteStat e1 = (ga1 != null ? ga1.peek() : null);
if (e1 == null) {
e1 = new org.apache.hudi.avro.model.HoodieWriteStat();
}
e1.customDecode(in);
a1.add(e1);
}
}
m0.put(k0, v0);
}
}
}
break;
case 1:
if (in.readIndex() != 1) {
in.readNull();
this.extraMetadata = null;
} else {
long size2 = in.readMapStart();
java.util.Map<java.lang.String,java.lang.String> m2 = this.extraMetadata; // Need fresh name due to limitation of macro system
if (m2 == null) {
m2 = new java.util.HashMap<java.lang.String,java.lang.String>((int)size2);
this.extraMetadata = m2;
} else m2.clear();
for ( ; 0 < size2; size2 = in.mapNext()) {
for ( ; size2 != 0; size2--) {
java.lang.String k2 = null;
k2 = in.readString();
java.lang.String v2 = null;
v2 = in.readString();
m2.put(k2, v2);
}
}
}
break;
case 2:
if (in.readIndex() != 0) {
in.readNull();
this.version = null;
} else {
this.version = in.readInt();
}
break;
case 3:
if (in.readIndex() != 1) {
in.readNull();
this.operationType = null;
} else {
this.operationType = in.readString();
}
break;
default:
throw new java.io.IOException("Corrupt ResolvingDecoder.");
}
}
}
}
}