/*
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2/import_config.proto
// Protobuf Java Version: 3.25.5
package com.google.cloud.retail.v2;
public interface BigQuerySourceOrBuilder
    extends
    // @@protoc_insertion_point(interface_extends:google.cloud.retail.v2.BigQuerySource)
    com.google.protobuf.MessageOrBuilder {
  /**
   *
   *
   * <pre>
   * BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format.
   *
   * Only supported in
   * [ImportProductsRequest][google.cloud.retail.v2.ImportProductsRequest].
   * </pre>
   *
   * <code>.google.type.Date partition_date = 6;</code>
   *
   * @return Whether the partitionDate field is set.
   */
  boolean hasPartitionDate();
  /**
   *
   *
   * <pre>
   * BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format.
   *
   * Only supported in
   * [ImportProductsRequest][google.cloud.retail.v2.ImportProductsRequest].
   * </pre>
   *
   * <code>.google.type.Date partition_date = 6;</code>
   *
   * @return The partitionDate.
   */
  com.google.type.Date getPartitionDate();
  /**
   *
   *
   * <pre>
   * BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format.
   *
   * Only supported in
   * [ImportProductsRequest][google.cloud.retail.v2.ImportProductsRequest].
   * </pre>
   *
   * <code>.google.type.Date partition_date = 6;</code>
   */
  com.google.type.DateOrBuilder getPartitionDateOrBuilder();
  /**
   *
   *
   * <pre>
   * The project ID (can be project # or ID) that the BigQuery source is in with
   * a length limit of 128 characters. If not specified, inherits the project
   * ID from the parent request.
   * </pre>
   *
   * <code>string project_id = 5;</code>
   *
   * @return The projectId.
   */
  java.lang.String getProjectId();
  /**
   *
   *
   * <pre>
   * The project ID (can be project # or ID) that the BigQuery source is in with
   * a length limit of 128 characters. If not specified, inherits the project
   * ID from the parent request.
   * </pre>
   *
   * <code>string project_id = 5;</code>
   *
   * @return The bytes for projectId.
   */
  com.google.protobuf.ByteString getProjectIdBytes();
  /**
   *
   *
   * <pre>
   * Required. The BigQuery data set to copy the data from with a length limit
   * of 1,024 characters.
   * </pre>
   *
   * <code>string dataset_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The datasetId.
   */
  java.lang.String getDatasetId();
  /**
   *
   *
   * <pre>
   * Required. The BigQuery data set to copy the data from with a length limit
   * of 1,024 characters.
   * </pre>
   *
   * <code>string dataset_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for datasetId.
   */
  com.google.protobuf.ByteString getDatasetIdBytes();
  /**
   *
   *
   * <pre>
   * Required. The BigQuery table to copy the data from with a length limit of
   * 1,024 characters.
   * </pre>
   *
   * <code>string table_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The tableId.
   */
  java.lang.String getTableId();
  /**
   *
   *
   * <pre>
   * Required. The BigQuery table to copy the data from with a length limit of
   * 1,024 characters.
   * </pre>
   *
   * <code>string table_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for tableId.
   */
  com.google.protobuf.ByteString getTableIdBytes();
  /**
   *
   *
   * <pre>
   * Intermediate Cloud Storage directory used for the import with a length
   * limit of 2,000 characters. Can be specified if one wants to have the
   * BigQuery export to a specific Cloud Storage directory.
   * </pre>
   *
   * <code>string gcs_staging_dir = 3;</code>
   *
   * @return The gcsStagingDir.
   */
  java.lang.String getGcsStagingDir();
  /**
   *
   *
   * <pre>
   * Intermediate Cloud Storage directory used for the import with a length
   * limit of 2,000 characters. Can be specified if one wants to have the
   * BigQuery export to a specific Cloud Storage directory.
   * </pre>
   *
   * <code>string gcs_staging_dir = 3;</code>
   *
   * @return The bytes for gcsStagingDir.
   */
  com.google.protobuf.ByteString getGcsStagingDirBytes();
  /**
   *
   *
   * <pre>
   * The schema to use when parsing the data from the source.
   *
   * Supported values for product imports:
   *
   * * `product` (default): One JSON [Product][google.cloud.retail.v2.Product]
   * per line. Each product must
   * have a valid [Product.id][google.cloud.retail.v2.Product.id].
   * * `product_merchant_center`: See [Importing catalog data from Merchant
   * Center](https://cloud.google.com/retail/recommendations-ai/docs/upload-catalog#mc).
   *
   * Supported values for user events imports:
   *
   * * `user_event` (default): One JSON
   * [UserEvent][google.cloud.retail.v2.UserEvent] per line.
   * * `user_event_ga360`:
   * The schema is available here:
   * https://support.google.com/analytics/answer/3437719.
   * * `user_event_ga4`:
   * The schema is available here:
   * https://support.google.com/analytics/answer/7029846.
   *
   * Supported values for autocomplete imports:
   *
   * * `suggestions` (default): One JSON completion suggestion per line.
   * * `denylist`: One JSON deny suggestion per line.
   * * `allowlist`: One JSON allow suggestion per line.
   * </pre>
   *
   * <code>string data_schema = 4;</code>
   *
   * @return The dataSchema.
   */
  java.lang.String getDataSchema();
  /**
   *
   *
   * <pre>
   * The schema to use when parsing the data from the source.
   *
   * Supported values for product imports:
   *
   * * `product` (default): One JSON [Product][google.cloud.retail.v2.Product]
   * per line. Each product must
   * have a valid [Product.id][google.cloud.retail.v2.Product.id].
   * * `product_merchant_center`: See [Importing catalog data from Merchant
   * Center](https://cloud.google.com/retail/recommendations-ai/docs/upload-catalog#mc).
   *
   * Supported values for user events imports:
   *
   * * `user_event` (default): One JSON
   * [UserEvent][google.cloud.retail.v2.UserEvent] per line.
   * * `user_event_ga360`:
   * The schema is available here:
   * https://support.google.com/analytics/answer/3437719.
   * * `user_event_ga4`:
   * The schema is available here:
   * https://support.google.com/analytics/answer/7029846.
   *
   * Supported values for autocomplete imports:
   *
   * * `suggestions` (default): One JSON completion suggestion per line.
   * * `denylist`: One JSON deny suggestion per line.
   * * `allowlist`: One JSON allow suggestion per line.
   * </pre>
   *
   * <code>string data_schema = 4;</code>
   *
   * @return The bytes for dataSchema.
   */
  com.google.protobuf.ByteString getDataSchemaBytes();

  com.google.cloud.retail.v2.BigQuerySource.PartitionCase getPartitionCase();
}
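
// Illustrative usage sketch, not part of the protoc-generated output above. It assumes the
// standard generated com.google.cloud.retail.v2.BigQuerySource message and Builder shipped in
// this same artifact, plus com.google.type.Date from proto-google-common-protos; identifiers
// other than the accessors declared in BigQuerySourceOrBuilder follow the usual protoc Java
// conventions rather than anything shown in this file.
final class BigQuerySourceOrBuilderUsageSketch {

  private BigQuerySourceOrBuilderUsageSketch() {}

  public static void main(String[] args) {
    // Build a source pointing at a BigQuery table of products, restricted to one partition.
    com.google.cloud.retail.v2.BigQuerySource source =
        com.google.cloud.retail.v2.BigQuerySource.newBuilder()
            .setProjectId("my-gcp-project") // optional; defaults to the parent request's project
            .setDatasetId("retail_dataset") // required, length limit 1,024 characters
            .setTableId("products") // required, length limit 1,024 characters
            .setGcsStagingDir("gs://my-bucket/retail-staging") // optional staging directory
            .setDataSchema("product") // one of the schema names documented above
            .setPartitionDate(
                com.google.type.Date.newBuilder().setYear(2024).setMonth(5).setDay(1).build())
            .build();

    // Any BigQuerySource (or BigQuerySource.Builder) can be read through the interface above.
    describe(source);
  }

  // Reads a source purely through the BigQuerySourceOrBuilder accessors declared above.
  static void describe(com.google.cloud.retail.v2.BigQuerySourceOrBuilder source) {
    System.out.printf(
        "table: %s.%s.%s, schema: %s%n",
        source.getProjectId(), source.getDatasetId(), source.getTableId(), source.getDataSchema());

    // partition_date lives in a oneof, so check presence before reading it.
    if (source.hasPartitionDate()) {
      com.google.type.Date d = source.getPartitionDate();
      System.out.printf("partition: %04d-%02d-%02d%n", d.getYear(), d.getMonth(), d.getDay());
    } else {
      System.out.println("partition case: " + source.getPartitionCase());
    }
  }
}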