/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.dataplex.v1.model;
/**
* Job specification for a metadata import job
*
* This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Cloud Dataplex API. For a detailed explanation see:
* https://developers.google.com/api-client-library/java/google-http-java-client/json
*
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class GoogleCloudDataplexV1MetadataJobImportJobSpec extends com.google.api.client.json.GenericJson {
/**
* Required. The sync mode for aspects. Only INCREMENTAL mode is supported for aspects. An aspect
* is modified only if the metadata import file includes a reference to the aspect in the
* update_mask field and the aspect_keys field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String aspectSyncMode;
/**
* Required. The sync mode for entries. Only FULL mode is supported for entries. All entries in
* the job's scope are modified. If an entry exists in Dataplex but isn't included in the metadata
* import file, the entry is deleted when you run the metadata job.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String entrySyncMode;
/**
* Optional. The level of logs to write to Cloud Logging for this job. Debug-level logs provide
* highly detailed information for troubleshooting, but their increased verbosity could incur
* additional costs (https://cloud.google.com/stackdriver/pricing) that might not be merited for
* all jobs. If unspecified, defaults to INFO.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String logLevel;
/**
* Required. A boundary on the scope of impact that the metadata import job can have.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private GoogleCloudDataplexV1MetadataJobImportJobSpecImportJobScope scope;
/**
* Optional. The time when the process that created the metadata import files began.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private String sourceCreateTime;
/**
* Optional. The URI of a Cloud Storage bucket or folder (beginning with gs:// and ending with /)
* that contains the metadata import files for this job. A metadata import file defines the values
* to set for each of the entries and aspects in a metadata job. For more information about how to
* create a metadata import file and the file requirements, see Metadata import file
* (https://cloud.google.com/dataplex/docs/import-metadata#metadata-import-file). You can provide
* multiple metadata import files in the same metadata job. The bucket or folder must contain at
* least one metadata import file, in JSON Lines format (either .json or .jsonl file extension). In
* FULL entry sync mode, don't save the metadata import file in a folder named
* SOURCE_STORAGE_URI/deletions/. Caution: If the metadata import file contains no data, all
* entries and aspects that belong to the job's scope are deleted.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceStorageUri;
/**
* Required. The sync mode for aspects. Only INCREMENTAL mode is supported for aspects. An aspect
* is modified only if the metadata import file includes a reference to the aspect in the
* update_mask field and the aspect_keys field.
* @return value or {@code null} for none
*/
public java.lang.String getAspectSyncMode() {
return aspectSyncMode;
}
/**
* Required. The sync mode for aspects. Only INCREMENTAL mode is supported for aspects. An aspect
* is modified only if the metadata import file includes a reference to the aspect in the
* update_mask field and the aspect_keys field.
* @param aspectSyncMode aspectSyncMode or {@code null} for none
*/
public GoogleCloudDataplexV1MetadataJobImportJobSpec setAspectSyncMode(java.lang.String aspectSyncMode) {
this.aspectSyncMode = aspectSyncMode;
return this;
}
/**
* Required. The sync mode for entries. Only FULL mode is supported for entries. All entries in
* the job's scope are modified. If an entry exists in Dataplex but isn't included in the metadata
* import file, the entry is deleted when you run the metadata job.
* @return value or {@code null} for none
*/
public java.lang.String getEntrySyncMode() {
return entrySyncMode;
}
/**
* Required. The sync mode for entries. Only FULL mode is supported for entries. All entries in
* the job's scope are modified. If an entry exists in Dataplex but isn't included in the metadata
* import file, the entry is deleted when you run the metadata job.
* @param entrySyncMode entrySyncMode or {@code null} for none
*/
public GoogleCloudDataplexV1MetadataJobImportJobSpec setEntrySyncMode(java.lang.String entrySyncMode) {
this.entrySyncMode = entrySyncMode;
return this;
}
/**
* Optional. The level of logs to write to Cloud Logging for this job. Debug-level logs provide
* highly detailed information for troubleshooting, but their increased verbosity could incur
* additional costs (https://cloud.google.com/stackdriver/pricing) that might not be merited for
* all jobs. If unspecified, defaults to INFO.
* @return value or {@code null} for none
*/
public java.lang.String getLogLevel() {
return logLevel;
}
/**
* Optional. The level of logs to write to Cloud Logging for this job. Debug-level logs provide
* highly detailed information for troubleshooting, but their increased verbosity could incur
* additional costs (https://cloud.google.com/stackdriver/pricing) that might not be merited for
* all jobs. If unspecified, defaults to INFO.
* @param logLevel logLevel or {@code null} for none
*/
public GoogleCloudDataplexV1MetadataJobImportJobSpec setLogLevel(java.lang.String logLevel) {
this.logLevel = logLevel;
return this;
}
/**
* Required. A boundary on the scope of impact that the metadata import job can have.
* @return value or {@code null} for none
*/
public GoogleCloudDataplexV1MetadataJobImportJobSpecImportJobScope getScope() {
return scope;
}
/**
* Required. A boundary on the scope of impact that the metadata import job can have.
* @param scope scope or {@code null} for none
*/
public GoogleCloudDataplexV1MetadataJobImportJobSpec setScope(GoogleCloudDataplexV1MetadataJobImportJobSpecImportJobScope scope) {
this.scope = scope;
return this;
}
/**
* Optional. The time when the process that created the metadata import files began.
* @return value or {@code null} for none
*/
public String getSourceCreateTime() {
return sourceCreateTime;
}
/**
* Optional. The time when the process that created the metadata import files began.
* @param sourceCreateTime sourceCreateTime or {@code null} for none
*/
public GoogleCloudDataplexV1MetadataJobImportJobSpec setSourceCreateTime(String sourceCreateTime) {
this.sourceCreateTime = sourceCreateTime;
return this;
}
/**
* Optional. The URI of a Cloud Storage bucket or folder (beginning with gs:// and ending with /)
* that contains the metadata import files for this job. A metadata import file defines the values
* to set for each of the entries and aspects in a metadata job. For more information about how to
* create a metadata import file and the file requirements, see Metadata import file
* (https://cloud.google.com/dataplex/docs/import-metadata#metadata-import-file). You can provide
* multiple metadata import files in the same metadata job. The bucket or folder must contain at
* least one metadata import file, in JSON Lines format (either .json or .jsonl file extension). In
* FULL entry sync mode, don't save the metadata import file in a folder named
* SOURCE_STORAGE_URI/deletions/. Caution: If the metadata import file contains no data, all
* entries and aspects that belong to the job's scope are deleted.
* @return value or {@code null} for none
*/
public java.lang.String getSourceStorageUri() {
return sourceStorageUri;
}
/**
* Optional. The URI of a Cloud Storage bucket or folder (beginning with gs:// and ending with /)
* that contains the metadata import files for this job. A metadata import file defines the values
* to set for each of the entries and aspects in a metadata job. For more information about how to
* create a metadata import file and the file requirements, see Metadata import file
* (https://cloud.google.com/dataplex/docs/import-metadata#metadata-import-file). You can provide
* multiple metadata import files in the same metadata job. The bucket or folder must contain at
* least one metadata import file, in JSON Lines format (either .json or .jsonl file extension). In
* FULL entry sync mode, don't save the metadata import file in a folder named
* SOURCE_STORAGE_URI/deletions/. Caution: If the metadata import file contains no data, all
* entries and aspects that belong to the job's scope are deleted.
* @param sourceStorageUri sourceStorageUri or {@code null} for none
*/
public GoogleCloudDataplexV1MetadataJobImportJobSpec setSourceStorageUri(java.lang.String sourceStorageUri) {
this.sourceStorageUri = sourceStorageUri;
return this;
}
@Override
public GoogleCloudDataplexV1MetadataJobImportJobSpec set(String fieldName, Object value) {
return (GoogleCloudDataplexV1MetadataJobImportJobSpec) super.set(fieldName, value);
}
@Override
public GoogleCloudDataplexV1MetadataJobImportJobSpec clone() {
return (GoogleCloudDataplexV1MetadataJobImportJobSpec) super.clone();
}
}
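// ---------------------------------------------------------------------------
// Usage sketch (not part of the generated model): a minimal example of
// assembling an import job spec with the fluent setters above. The sync-mode
// strings follow the field documentation (INCREMENTAL for aspects, FULL for
// entries); the entry-group resource name and the Cloud Storage URI are
// hypothetical placeholders, and the ImportJobScope setter shown is assumed
// from the Dataplex ImportJobScope schema.
class GoogleCloudDataplexV1MetadataJobImportJobSpecUsageSketch {
  static GoogleCloudDataplexV1MetadataJobImportJobSpec exampleSpec() {
    // Scope limits which entry groups (and entry/aspect types) the job may touch.
    GoogleCloudDataplexV1MetadataJobImportJobSpecImportJobScope scope =
        new GoogleCloudDataplexV1MetadataJobImportJobSpecImportJobScope()
            .setEntryGroups(java.util.Collections.singletonList(
                "projects/my-project/locations/us-central1/entryGroups/my-entry-group"));
    return new GoogleCloudDataplexV1MetadataJobImportJobSpec()
        .setAspectSyncMode("INCREMENTAL") // only INCREMENTAL is supported for aspects
        .setEntrySyncMode("FULL")         // only FULL is supported for entries
        .setLogLevel("INFO")              // default when unspecified
        .setScope(scope)
        // RFC 3339 timestamp of when the import files were generated (optional).
        .setSourceCreateTime("2024-05-01T00:00:00Z")
        // Bucket or folder that holds the JSON Lines metadata import files.
        .setSourceStorageUri("gs://my-bucket/metadata-import/");
  }
}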