/*
* Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
* This product includes software developed at Datadog (https://www.datadoghq.com/).
* Copyright 2019-Present Datadog, Inc.
*/
package com.datadog.api.client.v2.model;
import com.datadog.api.client.AbstractOpenApiSchema;
import com.datadog.api.client.JSON;
import com.datadog.api.client.UnparsedObject;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.MapperFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import jakarta.ws.rs.core.GenericType;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
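
/**
 * One item of the {@code included} array in a Sensitive Data Scanner get-config response. This is
 * a oneOf-style wrapper: the value it holds is either a
 * {@link SensitiveDataScannerRuleIncludedItem} or a {@link SensitiveDataScannerGroupIncludedItem},
 * selected at deserialization time by the nested
 * {@code SensitiveDataScannerGetConfigIncludedItemDeserializer}.
 *
 * <p>Illustrative read path (a sketch; assumes {@code item} was produced by the API client):
 *
 * <pre>{@code
 * Object actual = item.getActualInstance();
 * if (actual instanceof SensitiveDataScannerRuleIncludedItem) {
 *   // handle a scanning rule
 * } else if (actual instanceof SensitiveDataScannerGroupIncludedItem) {
 *   // handle a scanning group
 * }
 * }</pre>
 */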
@jakarta.annotation.Generated(
value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
@JsonDeserialize(
using =
SensitiveDataScannerGetConfigIncludedItem
.SensitiveDataScannerGetConfigIncludedItemDeserializer.class)
@JsonSerialize(
using =
SensitiveDataScannerGetConfigIncludedItem
.SensitiveDataScannerGetConfigIncludedItemSerializer.class)
public class SensitiveDataScannerGetConfigIncludedItem extends AbstractOpenApiSchema {
private static final Logger log =
Logger.getLogger(SensitiveDataScannerGetConfigIncludedItem.class.getName());
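  // Set when the raw JSON could not be mapped onto any of the known schemas for this oneOf type.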
@JsonIgnore public boolean unparsed = false;
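
  /**
   * Serializer for the wrapper: it writes out whichever concrete instance is currently stored
   * (via {@code getActualInstance()}), so the emitted JSON is exactly that of the wrapped type.
   */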
  public static class SensitiveDataScannerGetConfigIncludedItemSerializer
      extends StdSerializer<SensitiveDataScannerGetConfigIncludedItem> {
    public SensitiveDataScannerGetConfigIncludedItemSerializer(
        Class<SensitiveDataScannerGetConfigIncludedItem> t) {
      super(t);
    }
public SensitiveDataScannerGetConfigIncludedItemSerializer() {
this(null);
}
@Override
public void serialize(
SensitiveDataScannerGetConfigIncludedItem value,
JsonGenerator jgen,
SerializerProvider provider)
throws IOException, JsonProcessingException {
jgen.writeObject(value.getActualInstance());
}
}
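
  /**
   * Deserializer for the wrapper. It reads the JSON tree once, then tries to bind it to each
   * candidate schema in turn (SensitiveDataScannerRuleIncludedItem, then
   * SensitiveDataScannerGroupIncludedItem), counting how many candidates parse cleanly; the
   * result is accepted only when exactly one candidate matches.
   */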
  public static class SensitiveDataScannerGetConfigIncludedItemDeserializer
      extends StdDeserializer<SensitiveDataScannerGetConfigIncludedItem> {
    public SensitiveDataScannerGetConfigIncludedItemDeserializer() {
      this(SensitiveDataScannerGetConfigIncludedItem.class);
    }

    public SensitiveDataScannerGetConfigIncludedItemDeserializer(Class<?> vc) {
      super(vc);
    }
@Override
public SensitiveDataScannerGetConfigIncludedItem deserialize(
JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
JsonNode tree = jp.readValueAsTree();
Object deserialized = null;
Object tmp = null;
boolean typeCoercion = ctxt.isEnabled(MapperFeature.ALLOW_COERCION_OF_SCALARS);
int match = 0;
JsonToken token = tree.traverse(jp.getCodec()).nextToken();
// deserialize SensitiveDataScannerRuleIncludedItem
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
if (SensitiveDataScannerRuleIncludedItem.class.equals(Integer.class)
|| SensitiveDataScannerRuleIncludedItem.class.equals(Long.class)
|| SensitiveDataScannerRuleIncludedItem.class.equals(Float.class)
|| SensitiveDataScannerRuleIncludedItem.class.equals(Double.class)
|| SensitiveDataScannerRuleIncludedItem.class.equals(Boolean.class)
|| SensitiveDataScannerRuleIncludedItem.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
((SensitiveDataScannerRuleIncludedItem.class.equals(Integer.class)
|| SensitiveDataScannerRuleIncludedItem.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
((SensitiveDataScannerRuleIncludedItem.class.equals(Float.class)
|| SensitiveDataScannerRuleIncludedItem.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
(SensitiveDataScannerRuleIncludedItem.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
(SensitiveDataScannerRuleIncludedItem.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
tree.traverse(jp.getCodec()).readValueAs(SensitiveDataScannerRuleIncludedItem.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
if (!((SensitiveDataScannerRuleIncludedItem) tmp).unparsed) {
deserialized = tmp;
match++;
}
log.log(Level.FINER, "Input data matches schema 'SensitiveDataScannerRuleIncludedItem'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
"Input data does not match schema 'SensitiveDataScannerRuleIncludedItem'",
e);
}
// deserialize SensitiveDataScannerGroupIncludedItem
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
if (SensitiveDataScannerGroupIncludedItem.class.equals(Integer.class)
|| SensitiveDataScannerGroupIncludedItem.class.equals(Long.class)
|| SensitiveDataScannerGroupIncludedItem.class.equals(Float.class)
|| SensitiveDataScannerGroupIncludedItem.class.equals(Double.class)
|| SensitiveDataScannerGroupIncludedItem.class.equals(Boolean.class)
|| SensitiveDataScannerGroupIncludedItem.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
((SensitiveDataScannerGroupIncludedItem.class.equals(Integer.class)
|| SensitiveDataScannerGroupIncludedItem.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
((SensitiveDataScannerGroupIncludedItem.class.equals(Float.class)
|| SensitiveDataScannerGroupIncludedItem.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
(SensitiveDataScannerGroupIncludedItem.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
(SensitiveDataScannerGroupIncludedItem.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
tree.traverse(jp.getCodec()).readValueAs(SensitiveDataScannerGroupIncludedItem.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
if (!((SensitiveDataScannerGroupIncludedItem) tmp).unparsed) {
deserialized = tmp;
match++;
}
log.log(Level.FINER, "Input data matches schema 'SensitiveDataScannerGroupIncludedItem'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
"Input data does not match schema 'SensitiveDataScannerGroupIncludedItem'",
e);
}
SensitiveDataScannerGetConfigIncludedItem ret =
new SensitiveDataScannerGetConfigIncludedItem();
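      // Accept the result only when exactly one candidate schema matched; otherwise re-read the
      // raw payload as a generic map so it is not silently dropped.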
if (match == 1) {
ret.setActualInstance(deserialized);
} else {
        Map<String, Object> res =
            new ObjectMapper()
                .readValue(
                    tree.traverse(jp.getCodec()).readValueAsTree().toString(),
                    new TypeReference<Map<String, Object>>() {});