
// com.azure.resourcemanager.datafactory.models.SapHanaSource (Maven / Gradle / Ivy)
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.datafactory.models;
import com.azure.core.annotation.Fluent;
import com.azure.json.JsonReader;
import com.azure.json.JsonToken;
import com.azure.json.JsonWriter;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;
/**
 * A copy activity source for SAP HANA source.
 */
@Fluent
public final class SapHanaSource extends TabularSource {
    /*
     * Copy source type. Acts as the polymorphic discriminator when this model is
     * serialized alongside other CopySource subtypes.
     */
    private String type = "SapHanaSource";

    /*
     * SAP HANA Sql query. Type: string (or Expression with resultType string).
     */
    private Object query;

    /*
     * The packet size of data read from SAP HANA. Type: integer(or Expression with resultType integer).
     */
    private Object packetSize;

    /*
     * The partition mechanism that will be used for SAP HANA read in parallel. Possible values include: "None",
     * "PhysicalPartitionsOfTable", "SapHanaDynamicRange".
     */
    private Object partitionOption;

    /*
     * The settings that will be leveraged for SAP HANA source partitioning.
     */
    private SapHanaPartitionSettings partitionSettings;

    /**
     * Creates an instance of SapHanaSource class.
     */
    public SapHanaSource() {
    }

    /**
     * Get the type property: Copy source type.
     *
     * @return the type value.
     */
    @Override
    public String type() {
        return this.type;
    }

    /**
     * Get the query property: SAP HANA Sql query. Type: string (or Expression with resultType string).
     *
     * @return the query value.
     */
    public Object query() {
        return this.query;
    }

    /**
     * Set the query property: SAP HANA Sql query. Type: string (or Expression with resultType string).
     *
     * @param query the query value to set.
     * @return the SapHanaSource object itself.
     */
    public SapHanaSource withQuery(Object query) {
        this.query = query;
        return this;
    }

    /**
     * Get the packetSize property: The packet size of data read from SAP HANA. Type: integer(or Expression with
     * resultType integer).
     *
     * @return the packetSize value.
     */
    public Object packetSize() {
        return this.packetSize;
    }

    /**
     * Set the packetSize property: The packet size of data read from SAP HANA. Type: integer(or Expression with
     * resultType integer).
     *
     * @param packetSize the packetSize value to set.
     * @return the SapHanaSource object itself.
     */
    public SapHanaSource withPacketSize(Object packetSize) {
        this.packetSize = packetSize;
        return this;
    }

    /**
     * Get the partitionOption property: The partition mechanism that will be used for SAP HANA read in parallel.
     * Possible values include: "None", "PhysicalPartitionsOfTable", "SapHanaDynamicRange".
     *
     * @return the partitionOption value.
     */
    public Object partitionOption() {
        return this.partitionOption;
    }

    /**
     * Set the partitionOption property: The partition mechanism that will be used for SAP HANA read in parallel.
     * Possible values include: "None", "PhysicalPartitionsOfTable", "SapHanaDynamicRange".
     *
     * @param partitionOption the partitionOption value to set.
     * @return the SapHanaSource object itself.
     */
    public SapHanaSource withPartitionOption(Object partitionOption) {
        this.partitionOption = partitionOption;
        return this;
    }

    /**
     * Get the partitionSettings property: The settings that will be leveraged for SAP HANA source partitioning.
     *
     * @return the partitionSettings value.
     */
    public SapHanaPartitionSettings partitionSettings() {
        return this.partitionSettings;
    }

    /**
     * Set the partitionSettings property: The settings that will be leveraged for SAP HANA source partitioning.
     *
     * @param partitionSettings the partitionSettings value to set.
     * @return the SapHanaSource object itself.
     */
    public SapHanaSource withPartitionSettings(SapHanaPartitionSettings partitionSettings) {
        this.partitionSettings = partitionSettings;
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SapHanaSource withQueryTimeout(Object queryTimeout) {
        super.withQueryTimeout(queryTimeout);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SapHanaSource withAdditionalColumns(Object additionalColumns) {
        super.withAdditionalColumns(additionalColumns);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SapHanaSource withSourceRetryCount(Object sourceRetryCount) {
        super.withSourceRetryCount(sourceRetryCount);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SapHanaSource withSourceRetryWait(Object sourceRetryWait) {
        super.withSourceRetryWait(sourceRetryWait);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SapHanaSource withMaxConcurrentConnections(Object maxConcurrentConnections) {
        super.withMaxConcurrentConnections(maxConcurrentConnections);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SapHanaSource withDisableMetricsCollection(Object disableMetricsCollection) {
        super.withDisableMetricsCollection(disableMetricsCollection);
        return this;
    }

    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    @Override
    public void validate() {
        // Only the nested model needs validation; all other properties are untyped
        // Expression placeholders with no constraints to check here.
        if (partitionSettings() != null) {
            partitionSettings().validate();
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public JsonWriter toJson(JsonWriter jsonWriter) throws IOException {
        jsonWriter.writeStartObject();
        // Inherited CopySource/TabularSource properties are flattened into this object.
        jsonWriter.writeUntypedField("sourceRetryCount", sourceRetryCount());
        jsonWriter.writeUntypedField("sourceRetryWait", sourceRetryWait());
        jsonWriter.writeUntypedField("maxConcurrentConnections", maxConcurrentConnections());
        jsonWriter.writeUntypedField("disableMetricsCollection", disableMetricsCollection());
        jsonWriter.writeUntypedField("queryTimeout", queryTimeout());
        jsonWriter.writeUntypedField("additionalColumns", additionalColumns());
        jsonWriter.writeStringField("type", this.type);
        jsonWriter.writeUntypedField("query", this.query);
        jsonWriter.writeUntypedField("packetSize", this.packetSize);
        jsonWriter.writeUntypedField("partitionOption", this.partitionOption);
        jsonWriter.writeJsonField("partitionSettings", this.partitionSettings);
        if (additionalProperties() != null) {
            // Fixed raw type: additionalProperties() yields a Map<String, Object>.
            for (Map.Entry<String, Object> additionalProperty : additionalProperties().entrySet()) {
                jsonWriter.writeUntypedField(additionalProperty.getKey(), additionalProperty.getValue());
            }
        }
        return jsonWriter.writeEndObject();
    }

    /**
     * Reads an instance of SapHanaSource from the JsonReader.
     *
     * @param jsonReader The JsonReader being read.
     * @return An instance of SapHanaSource if the JsonReader was pointing to an instance of it, or null if it was
     * pointing to JSON null.
     * @throws IOException If an error occurs while reading the SapHanaSource.
     */
    public static SapHanaSource fromJson(JsonReader jsonReader) throws IOException {
        return jsonReader.readObject(reader -> {
            SapHanaSource deserializedSapHanaSource = new SapHanaSource();
            // Fixed raw type: unrecognized fields are collected as String -> untyped Object.
            Map<String, Object> additionalProperties = null;
            while (reader.nextToken() != JsonToken.END_OBJECT) {
                String fieldName = reader.getFieldName();
                reader.nextToken();
                if ("sourceRetryCount".equals(fieldName)) {
                    deserializedSapHanaSource.withSourceRetryCount(reader.readUntyped());
                } else if ("sourceRetryWait".equals(fieldName)) {
                    deserializedSapHanaSource.withSourceRetryWait(reader.readUntyped());
                } else if ("maxConcurrentConnections".equals(fieldName)) {
                    deserializedSapHanaSource.withMaxConcurrentConnections(reader.readUntyped());
                } else if ("disableMetricsCollection".equals(fieldName)) {
                    deserializedSapHanaSource.withDisableMetricsCollection(reader.readUntyped());
                } else if ("queryTimeout".equals(fieldName)) {
                    deserializedSapHanaSource.withQueryTimeout(reader.readUntyped());
                } else if ("additionalColumns".equals(fieldName)) {
                    deserializedSapHanaSource.withAdditionalColumns(reader.readUntyped());
                } else if ("type".equals(fieldName)) {
                    // Accept whatever discriminator the payload carries rather than rejecting
                    // mismatches; matches generated-code convention.
                    deserializedSapHanaSource.type = reader.getString();
                } else if ("query".equals(fieldName)) {
                    deserializedSapHanaSource.query = reader.readUntyped();
                } else if ("packetSize".equals(fieldName)) {
                    deserializedSapHanaSource.packetSize = reader.readUntyped();
                } else if ("partitionOption".equals(fieldName)) {
                    deserializedSapHanaSource.partitionOption = reader.readUntyped();
                } else if ("partitionSettings".equals(fieldName)) {
                    deserializedSapHanaSource.partitionSettings = SapHanaPartitionSettings.fromJson(reader);
                } else {
                    // LinkedHashMap preserves the payload's field order on round-trip.
                    if (additionalProperties == null) {
                        additionalProperties = new LinkedHashMap<>();
                    }
                    additionalProperties.put(fieldName, reader.readUntyped());
                }
            }
            deserializedSapHanaSource.withAdditionalProperties(additionalProperties);
            return deserializedSapHanaSource;
        });
    }
}
// (non-source page footer removed: © 2015 - 2025 Weber Informatics LLC | Privacy Policy)