package org.etlunit.json.validator;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.node.JsonNodeFactory;
import org.codehaus.jackson.node.ObjectNode;
import org.codehaus.jackson.node.POJONode;
import java.beans.BeanInfo;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.*;
public class JsonUtils
{
public enum merge_type
{
/**
* If there is a conflict, the left value (this) wins
*/
left_merge,
/**
* If there is a conflict, the right value (other) wins
*/
right_merge,
/**
* If there is a conflict and the type is object on both sides,
* merge will recurse. Otherwise, the merge fails.
*/
distinct_merge
}
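/**
 * Returns a structurally independent copy of the given node by serializing it
 * to text and re-parsing, so no references are shared with the source graph.
 *
 * <p>Illustrative usage (a minimal sketch; the document content is hypothetical):</p>
 * <pre>{@code
 * JsonNode original = JsonUtils.loadJson("{\"name\": \"etlunit\"}");
 * JsonNode copy = JsonUtils.deepCopy(original);
 * // modifying the copy leaves the original graph untouched
 * }</pre>
 */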
public static JsonNode deepCopy(JsonNode robject)
{
// just do a brute-force clone by toString -> parse
// That way there are no ties from one graph to another
try
{
return loadJson(robject.toString());
} catch (JsonSchemaValidationException e)
{
throw new IllegalArgumentException("Problem parsing json", e);
}
}
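/**
 * Merges two object nodes using {@link merge_type#left_merge} semantics:
 * on a conflict, the left (first) value wins.
 *
 * <p>Illustrative usage (a minimal sketch; keys and values are hypothetical):</p>
 * <pre>{@code
 * JsonNode left = JsonUtils.loadJson("{\"a\": 1, \"b\": 2}");
 * JsonNode right = JsonUtils.loadJson("{\"b\": 3, \"c\": 4}");
 * JsonNode merged = JsonUtils.merge(left, right);
 * // merged is {"a": 1, "b": 2, "c": 4} - the left "b" wins
 * }</pre>
 */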
public static JsonNode merge(JsonNode lobject, JsonNode robject)
{
return merge(lobject, robject, merge_type.left_merge);
}
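/**
 * Merges two object nodes using the supplied {@link merge_type}.
 * A right merge is performed by swapping the operands and applying left-merge logic;
 * a distinct merge recurses into nested objects and fails on any other conflict.
 *
 * <p>Illustrative usage (a minimal sketch; keys and values are hypothetical):</p>
 * <pre>{@code
 * JsonNode left = JsonUtils.loadJson("{\"a\": 1}");
 * JsonNode right = JsonUtils.loadJson("{\"a\": 2}");
 * JsonNode merged = JsonUtils.merge(left, right, JsonUtils.merge_type.right_merge);
 * // merged is {"a": 2} - the right value wins on conflict
 * }</pre>
 */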
public static JsonNode merge(JsonNode lobject, JsonNode robject, merge_type type)
{
if (!lobject.isObject() || !robject.isObject())
{
throw new UnsupportedOperationException("Merge operations only supported on object types");
}
// determine who should be considered left and right
ObjectNode lobjnode = (ObjectNode) lobject;
ObjectNode robjnode = (ObjectNode) robject;
switch (type)
{
case left_merge:
// nothing - already set up for lmerge
break;
case right_merge:
// swap left and right to keep the logic as a left merge
lobjnode = (ObjectNode) robject;
robjnode = (ObjectNode) lobject;
break;
case distinct_merge:
break;
}
// build the result in a fresh node; values are deep-copied in below so neither original graph is modified
ObjectNode mergeResult = JsonNodeFactory.instance.objectNode();
// create a unified key set
Set<Map.Entry<String, JsonNode>> setCombined = new TreeSet<Map.Entry<String, JsonNode>>(new Comparator<Map.Entry<String, JsonNode>>()
{
public int compare(Map.Entry<String, JsonNode> o1, Map.Entry<String, JsonNode> o2)
{
return o1.getKey().compareTo(o2.getKey());
}
public boolean equals(Object obj)
{
return obj == this;
}
});
setCombined.addAll(getFieldSet(lobjnode.getFields()));
setCombined.addAll(getFieldSet(robjnode.getFields()));
// iterate through the keys, and handle l/r insert / merge as required
for (Map.Entry<String, JsonNode> s : setCombined)
{
// check the lvalue (me)
// check for distinctness
String key = s.getKey();
JsonNode lvalue = lobjnode.get(key);
JsonNode rvalue = robjnode.get(key);
// check for errors
if (lvalue == null && rvalue == null)
{
throw new IllegalStateException("Key '" + key + "' is in the combined key set but missing from both nodes");
}
else if (rvalue == null || lvalue == null)
{
// no need to merge recursively - simply add a deep copy
mergeResult.put(key, deepCopy(s.getValue()));
}
else
{
// both sides contain, need to merge.
if (!lvalue.equals(rvalue))
{
// different.
if (
(lvalue.isObject() && rvalue.isObject()) ||
(lvalue.isPojo() && rvalue.isPojo())
)
{
// recursively merge. This is always left or distinct (we resolved left and right earlier by swapping nodes)
mergeResult.put(key, merge(lvalue, rvalue, type == merge_type.distinct_merge ? merge_type.distinct_merge : merge_type.left_merge));
}
else if (type == merge_type.distinct_merge)
{
throw new UnsupportedOperationException("Node types not compatible for distinct merge: [lvalue = '" + lvalue + "', rvalue = '" + rvalue + "']");
}
else
{
// no need to merge recursively - simply add a deep copy of the l value
mergeResult.put(key, deepCopy(lvalue));
}
}
else
{
// no need to merge recursively - simply add a deep copy
mergeResult.put(key, deepCopy(lvalue));
}
}
}
return mergeResult;
}
private static Collection<Map.Entry<String, JsonNode>> getFieldSet(Iterator<Map.Entry<String, JsonNode>> fields)
{
List<Map.Entry<String, JsonNode>> list = new ArrayList<Map.Entry<String, JsonNode>>();
while (fields.hasNext())
{
list.add(fields.next());
}
return list;
}
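/**
 * Parses a JSON document from a string into a Jackson tree, wrapping any parse
 * failure in a {@link JsonSchemaValidationException}.
 *
 * <p>Illustrative usage (a minimal sketch; the document content is hypothetical):</p>
 * <pre>{@code
 * JsonNode node = JsonUtils.loadJson("{\"name\": \"etlunit\"}");
 * String name = node.get("name").getTextValue();
 * }</pre>
 */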
public static JsonNode loadJson(String schema) throws JsonSchemaValidationException
{
ObjectMapper mapper = new ObjectMapper();
JsonNode rootNode = null;
try
{
rootNode = mapper.readValue(schema, JsonNode.class);
return rootNode;
} catch (IOException e)
{
throw new JsonSchemaValidationException("Error parsing json: " + schema, e);
}
}
public static JsonNode loadJson(File schema) throws JsonSchemaValidationException
{
ObjectMapper mapper = new ObjectMapper();
JsonNode rootNode = null;
try
{
rootNode = mapper.readValue(schema, JsonNode.class);
return rootNode;
} catch (IOException e)
{
throw new JsonSchemaValidationException("Error parsing json: " + schema, e);
}
}
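/**
 * Serializes a node to a string using Jackson's default pretty printer.
 *
 * <p>Illustrative usage (a minimal sketch):</p>
 * <pre>{@code
 * JsonNode node = JsonUtils.loadJson("{\"a\": {\"b\": 1}}");
 * System.out.println(JsonUtils.printJson(node));
 * }</pre>
 */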
public static String printJson(JsonNode object) throws JsonSchemaValidationException
{
ObjectMapper mapper = new ObjectMapper();
try
{
return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(object);
} catch (IOException e)
{
throw new JsonSchemaValidationException("Error parsing json: " + object, e);
}
}
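/**
 * Resolves a dot-separated path against an object (or POJO-backed) node,
 * returning {@code null} when an intermediate object key is missing.
 *
 * <p>Illustrative usage (a minimal sketch; the path and document are hypothetical):</p>
 * <pre>{@code
 * JsonNode node = JsonUtils.loadJson("{\"connection\": {\"host\": \"localhost\"}}");
 * JsonNode host = JsonUtils.query(node, "connection.host");
 * // host is the text node "localhost"; "connection.port" would yield null
 * }</pre>
 */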
public static JsonNode query(JsonNode node, String path)
{
JsonNode thisNode = node;
StringTokenizer st = new StringTokenizer(path, ".");
while (st.hasMoreTokens())
{
String token = st.nextToken();
if (!thisNode.isObject() && !thisNode.isPojo())
{
throw new UnsupportedOperationException("Path queries are only valid on object types");
}
else if (thisNode.isObject())
{
JsonNode jsonNode = thisNode.get(token);
if (jsonNode == null)
{
return null;
}
thisNode = jsonNode;
}
else if (thisNode.isPojo())
{
try
{
Object pojo = ((POJONode) thisNode).getPojo();
BeanInfo info = Introspector.getBeanInfo(pojo.getClass());
for (PropertyDescriptor pd : info.getPropertyDescriptors())
{
if (!pd.getName().equals(token))
{
continue;
}
Method m = pd.getReadMethod();
if (m != null)
{
Object invoke = m.invoke(pojo);
if (invoke instanceof String)
{
thisNode = JsonNodeFactory.instance.textNode((String) invoke);
}
else
{
thisNode = JsonNodeFactory.instance.POJONode(invoke);
}
}
}
} catch (Exception e)
{
throw new RuntimeException(e);
}
}
}
return thisNode;
}
}