All downloads are FREE. The search and download functionality uses the official Maven repository.
Please wait. This can take a few minutes ...
Many resources are needed to download a project. Please understand that we have to compensate our server costs. Thank you in advance.
Project price only 1 $
You can buy this project and download/modify it how often you want.
com.hedera.hapi.streams.codec.RecordStreamFileJsonCodec Maven / Gradle / Ivy
package com.hedera.hapi.streams.codec;
import com.hedera.pbj.runtime.*;
import com.hedera.pbj.runtime.io.*;
import com.hedera.pbj.runtime.io.buffer.*;
import java.io.IOException;
import java.nio.*;
import java.nio.charset.*;
import java.util.*;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
import com.hedera.hapi.streams.RecordStreamFile;
import com.hedera.hapi.node.base.*;
import com.hedera.hapi.node.base.codec.*;
import com.hedera.hapi.streams.*;
import com.hedera.hapi.streams.schema.*;
import java.util.*;
import com.hedera.pbj.runtime.jsonparser.*;
import static com.hedera.hapi.streams.schema.RecordStreamFileSchema.*;
import static com.hedera.pbj.runtime.JsonTools.*;
/**
 * JSON Codec for RecordStreamFile model object. Generated based on protobuf schema.
 */
public final class RecordStreamFileJsonCodec implements JsonCodec<RecordStreamFile> {
    /**
     * Parses a RecordStreamFile object from JSON parse tree for object JSONParser.ObjContext.
     * Throws an UnknownFieldException wrapped in a ParseException if in strict mode ONLY.
     *
     * @param root The JSON parsed object tree to parse data from
     * @param strictMode When true, an unknown field name causes a ParseException; when false,
     *                   unknown fields are silently ignored
     * @param maxDepth The remaining allowed depth of nested messages; decremented on each
     *                 nested parse, parsing fails once it goes below zero
     * @return Parsed RecordStreamFile model object
     * @throws ParseException If parsing fails (including when {@code root} is null, in which
     *                        case the resulting NullPointerException is wrapped)
     */
    public @NonNull RecordStreamFile parse(
            @Nullable final JSONParser.ObjContext root,
            final boolean strictMode,
            final int maxDepth) throws ParseException {
        if (maxDepth < 0) {
            throw new ParseException("Reached maximum allowed depth of nested messages");
        }
        try {
            // -- TEMP STATE FIELDS --------------------------------------
            SemanticVersion temp_hapi_proto_version = null;
            HashObject temp_start_object_running_hash = null;
            List<RecordStreamItem> temp_record_stream_items = Collections.emptyList();
            HashObject temp_end_object_running_hash = null;
            long temp_block_number = 0;
            List<SidecarMetadata> temp_sidecars = Collections.emptyList();
            // -- EXTRACT VALUES FROM PARSE TREE ---------------------------------------------
            // A null root triggers a NullPointerException here, which is wrapped in a
            // ParseException by the catch block below.
            for (JSONParser.PairContext kvPair : root.pair()) {
                switch (kvPair.STRING().getText()) {
                    case "hapiProtoVersion" /* [1] */ : temp_hapi_proto_version = SemanticVersion.JSON.parse(kvPair.value().getChild(JSONParser.ObjContext.class, 0), false, maxDepth - 1); break;
                    case "startObjectRunningHash" /* [2] */ : temp_start_object_running_hash = HashObject.JSON.parse(kvPair.value().getChild(JSONParser.ObjContext.class, 0), false, maxDepth - 1); break;
                    case "recordStreamItems" /* [3] */ : temp_record_stream_items = parseObjArray(kvPair.value().arr(), RecordStreamItem.JSON, maxDepth - 1); break;
                    case "endObjectRunningHash" /* [4] */ : temp_end_object_running_hash = HashObject.JSON.parse(kvPair.value().getChild(JSONParser.ObjContext.class, 0), false, maxDepth - 1); break;
                    case "blockNumber" /* [5] */ : temp_block_number = parseLong(kvPair.value()); break;
                    case "sidecars" /* [6] */ : temp_sidecars = parseObjArray(kvPair.value().arr(), SidecarMetadata.JSON, maxDepth - 1); break;
                    default: {
                        if (strictMode) {
                            // Since we are parsing in strict mode, this is an exceptional condition.
                            throw new UnknownFieldException(kvPair.STRING().getText());
                        }
                    }
                }
            }
            return new RecordStreamFile(temp_hapi_proto_version, temp_start_object_running_hash, temp_record_stream_items, temp_end_object_running_hash, temp_block_number, temp_sidecars);
        } catch (Exception ex) {
            throw new ParseException(ex);
        }
    }

    /**
     * Returns JSON string representing an item.
     *
     * @param data The item to convert. Must not be null.
     * @param indent The indent to use for pretty printing
     * @param inline When true the output starts directly with "{" (no leading indent);
     *               when false it is prefixed with the given indent. The output never
     *               ends with a newline.
     * @return JSON encoded string representing the item
     */
    @Override
    public String toJSON(@NonNull RecordStreamFile data, String indent, boolean inline) {
        StringBuilder sb = new StringBuilder();
        // start
        sb.append(inline ? "{\n" : indent + "{\n");
        final String childIndent = indent + INDENT;
        // collect field lines; default-valued fields are omitted per protobuf JSON mapping
        final List<String> fieldLines = new ArrayList<>();
        // [1] - hapi_proto_version
        if (data.hapiProtoVersion() != null) fieldLines.add(field(childIndent, "hapiProtoVersion", com.hedera.hapi.node.base.SemanticVersion.JSON, data.hapiProtoVersion()));
        // [2] - start_object_running_hash
        if (data.startObjectRunningHash() != null) fieldLines.add(field(childIndent, "startObjectRunningHash", com.hedera.hapi.streams.HashObject.JSON, data.startObjectRunningHash()));
        // [3] - record_stream_items
        if (!data.recordStreamItems().isEmpty()) fieldLines.add(arrayField(childIndent, "recordStreamItems", com.hedera.hapi.streams.RecordStreamItem.JSON, data.recordStreamItems()));
        // [4] - end_object_running_hash
        if (data.endObjectRunningHash() != null) fieldLines.add(field(childIndent, "endObjectRunningHash", com.hedera.hapi.streams.HashObject.JSON, data.endObjectRunningHash()));
        // [5] - block_number
        if (data.blockNumber() != 0) fieldLines.add(field("blockNumber", data.blockNumber()));
        // [6] - sidecars
        if (!data.sidecars().isEmpty()) fieldLines.add(arrayField(childIndent, "sidecars", com.hedera.hapi.streams.SidecarMetadata.JSON, data.sidecars()));
        // write field lines
        if (!fieldLines.isEmpty()) {
            sb.append(childIndent);
            sb.append(String.join(",\n" + childIndent, fieldLines));
            sb.append("\n");
        }
        // end
        sb.append(indent).append("}");
        return sb.toString();
    }
}