/*
* Copyright 2015 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.bigquery;
import static com.google.cloud.RetryHelper.runWithRetries;
import static com.google.cloud.bigquery.PolicyHelper.convertFromApiPolicy;
import static com.google.cloud.bigquery.PolicyHelper.convertToApiPolicy;
import static com.google.common.base.Preconditions.checkArgument;
import static java.net.HttpURLConnection.HTTP_NOT_FOUND;
import com.google.api.core.BetaApi;
import com.google.api.core.InternalApi;
import com.google.api.gax.paging.Page;
import com.google.api.services.bigquery.model.ErrorProto;
import com.google.api.services.bigquery.model.GetQueryResultsResponse;
import com.google.api.services.bigquery.model.QueryRequest;
import com.google.api.services.bigquery.model.TableDataInsertAllRequest;
import com.google.api.services.bigquery.model.TableDataInsertAllRequest.Rows;
import com.google.api.services.bigquery.model.TableDataInsertAllResponse;
import com.google.api.services.bigquery.model.TableDataList;
import com.google.api.services.bigquery.model.TableRow;
import com.google.api.services.bigquery.model.TableSchema;
import com.google.cloud.BaseService;
import com.google.cloud.PageImpl;
import com.google.cloud.PageImpl.NextPageFetcher;
import com.google.cloud.Policy;
import com.google.cloud.RetryHelper;
import com.google.cloud.RetryHelper.RetryHelperException;
import com.google.cloud.Tuple;
import com.google.cloud.bigquery.InsertAllRequest.RowToInsert;
import com.google.cloud.bigquery.QueryJobConfiguration.JobCreationMode;
import com.google.cloud.bigquery.spi.v2.BigQueryRpc;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Function;
import com.google.common.base.Strings;
import com.google.common.base.Supplier;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.threeten.bp.Instant;
import org.threeten.bp.temporal.ChronoUnit;
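// Default implementation of the BigQuery service, backed by the HTTP/JSON BigQueryRpc
// transport. Callers normally obtain an instance through BigQueryOptions, e.g.
// BigQueryOptions.getDefaultInstance().getService().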
final class BigQueryImpl extends BaseService<BigQueryOptions> implements BigQuery {
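// The page fetchers below are stored inside PageImpl instances so that result iteration can
// lazily request the next page using the saved cursor and request options.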
private static class DatasetPageFetcher implements NextPageFetcher<Dataset> {
private static final long serialVersionUID = -3057564042439021278L;
private final Map<BigQueryRpc.Option, ?> requestOptions;
private final BigQueryOptions serviceOptions;
private final String projectId;
DatasetPageFetcher(
String projectId,
BigQueryOptions serviceOptions,
String cursor,
Map<BigQueryRpc.Option, ?> optionMap) {
this.projectId = projectId;
this.requestOptions =
PageImpl.nextRequestOptions(BigQueryRpc.Option.PAGE_TOKEN, cursor, optionMap);
this.serviceOptions = serviceOptions;
}
@Override
public Page<Dataset> getNextPage() {
return listDatasets(projectId, serviceOptions, requestOptions);
}
}
private static class TablePageFetcher implements NextPageFetcher<Table> {
private static final long serialVersionUID = 8611248840504201187L;
private final Map<BigQueryRpc.Option, ?> requestOptions;
private final BigQueryOptions serviceOptions;
private final DatasetId datasetId;
TablePageFetcher(
DatasetId datasetId,
BigQueryOptions serviceOptions,
String cursor,
Map<BigQueryRpc.Option, ?> optionMap) {
this.requestOptions =
PageImpl.nextRequestOptions(BigQueryRpc.Option.PAGE_TOKEN, cursor, optionMap);
this.serviceOptions = serviceOptions;
this.datasetId = datasetId;
}
@Override
public Page<Table> getNextPage() {
return listTables(datasetId, serviceOptions, requestOptions);
}
}
private static class ModelPageFetcher implements NextPageFetcher<Model> {
private static final long serialVersionUID = 8611248811504201187L;
private final Map<BigQueryRpc.Option, ?> requestOptions;
private final BigQueryOptions serviceOptions;
private final DatasetId datasetId;
ModelPageFetcher(
DatasetId datasetId,
BigQueryOptions serviceOptions,
String cursor,
Map<BigQueryRpc.Option, ?> optionMap) {
this.requestOptions =
PageImpl.nextRequestOptions(BigQueryRpc.Option.PAGE_TOKEN, cursor, optionMap);
this.serviceOptions = serviceOptions;
this.datasetId = datasetId;
}
@Override
public Page<Model> getNextPage() {
return listModels(datasetId, serviceOptions, requestOptions);
}
}
private static class RoutinePageFetcher implements NextPageFetcher<Routine> {
private static final long serialVersionUID = 8611242311504201187L;
private final Map<BigQueryRpc.Option, ?> requestOptions;
private final BigQueryOptions serviceOptions;
private final DatasetId datasetId;
RoutinePageFetcher(
DatasetId datasetId,
BigQueryOptions serviceOptions,
String cursor,
Map<BigQueryRpc.Option, ?> optionMap) {
this.requestOptions =
PageImpl.nextRequestOptions(BigQueryRpc.Option.PAGE_TOKEN, cursor, optionMap);
this.serviceOptions = serviceOptions;
this.datasetId = datasetId;
}
@Override
public Page<Routine> getNextPage() {
return listRoutines(datasetId, serviceOptions, requestOptions);
}
}
private static class JobPageFetcher implements NextPageFetcher<Job> {
private static final long serialVersionUID = 8536533282558245472L;
private final Map<BigQueryRpc.Option, ?> requestOptions;
private final BigQueryOptions serviceOptions;
JobPageFetcher(
BigQueryOptions serviceOptions, String cursor, Map<BigQueryRpc.Option, ?> optionMap) {
this.requestOptions =
PageImpl.nextRequestOptions(BigQueryRpc.Option.PAGE_TOKEN, cursor, optionMap);
this.serviceOptions = serviceOptions;
}
@Override
public Page<Job> getNextPage() {
return listJobs(serviceOptions, requestOptions);
}
}
private static class TableDataPageFetcher implements NextPageFetcher<FieldValueList> {
private static final long serialVersionUID = -8501991114794410114L;
private final Map<BigQueryRpc.Option, ?> requestOptions;
private final BigQueryOptions serviceOptions;
private final TableId table;
private final Schema schema;
TableDataPageFetcher(
TableId table,
Schema schema,
BigQueryOptions serviceOptions,
String cursor,
Map<BigQueryRpc.Option, ?> optionMap) {
this.requestOptions =
PageImpl.nextRequestOptions(BigQueryRpc.Option.PAGE_TOKEN, cursor, optionMap);
this.serviceOptions = serviceOptions;
this.table = table;
this.schema = schema;
}
@Override
public Page<FieldValueList> getNextPage() {
return listTableData(table, schema, serviceOptions, requestOptions).x();
}
}
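// Unlike TableDataPageFetcher, this fetcher blocks until the backing query job reaches the
// DONE state (polling roughly every five seconds) before listing rows from the job's
// destination table.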
private class QueryPageFetcher extends Thread implements NextPageFetcher<FieldValueList> {
private static final long serialVersionUID = -8501991114794410114L;
private final Map<BigQueryRpc.Option, ?> requestOptions;
private final BigQueryOptions serviceOptions;
private Job job;
private final TableId table;
private final Schema schema;
QueryPageFetcher(
JobId jobId,
Schema schema,
BigQueryOptions serviceOptions,
String cursor,
Map<BigQueryRpc.Option, ?> optionMap) {
this.requestOptions =
PageImpl.nextRequestOptions(BigQueryRpc.Option.PAGE_TOKEN, cursor, optionMap);
this.serviceOptions = serviceOptions;
this.job = getJob(jobId);
this.table = ((QueryJobConfiguration) job.getConfiguration()).getDestinationTable();
this.schema = schema;
}
@Override
public Page<FieldValueList> getNextPage() {
while (!JobStatus.State.DONE.equals(job.getStatus().getState())) {
try {
sleep(5000);
} catch (InterruptedException ex) {
throw new RuntimeException(ex.getMessage());
}
job = job.reload();
}
return listTableData(table, schema, serviceOptions, requestOptions).x();
}
}
private final BigQueryRpc bigQueryRpc;
private static final BigQueryRetryConfig DEFAULT_RETRY_CONFIG =
BigQueryRetryConfig.newBuilder()
.retryOnMessage(BigQueryErrorMessages.RATE_LIMIT_EXCEEDED_MSG)
.retryOnMessage(BigQueryErrorMessages.JOB_RATE_LIMIT_EXCEEDED_MSG)
.retryOnRegEx(BigQueryErrorMessages.RetryRegExPatterns.RATE_LIMIT_EXCEEDED_REGEX)
.build(); // retry config with Error Messages and RegEx for RateLimitExceeded Error
BigQueryImpl(BigQueryOptions options) {
super(options);
bigQueryRpc = options.getBigQueryRpcV2();
}
@Override
public Dataset create(DatasetInfo datasetInfo, DatasetOption... options) {
final com.google.api.services.bigquery.model.Dataset datasetPb =
datasetInfo
.setProjectId(
Strings.isNullOrEmpty(datasetInfo.getDatasetId().getProject())
? getOptions().getProjectId()
: datasetInfo.getDatasetId().getProject())
.toPb();
final Map<BigQueryRpc.Option, ?> optionsMap = optionMap(options);
try {
return Dataset.fromPb(
this,
runWithRetries(
new Callable<com.google.api.services.bigquery.model.Dataset>() {
@Override
public com.google.api.services.bigquery.model.Dataset call() {
return bigQueryRpc.create(datasetPb, optionsMap);
}
},
getOptions().getRetrySettings(),
BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
getOptions().getClock()));
} catch (RetryHelper.RetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
}
@Override
public Table create(TableInfo tableInfo, TableOption... options) {
final com.google.api.services.bigquery.model.Table tablePb =
tableInfo
.setProjectId(
Strings.isNullOrEmpty(tableInfo.getTableId().getProject())
? getOptions().getProjectId()
: tableInfo.getTableId().getProject())
.toPb();
handleExternalTableSchema(tablePb);
final Map<BigQueryRpc.Option, ?> optionsMap = optionMap(options);
try {
return Table.fromPb(
this,
runWithRetries(
new Callable<com.google.api.services.bigquery.model.Table>() {
@Override
public com.google.api.services.bigquery.model.Table call() {
return bigQueryRpc.create(tablePb, optionsMap);
}
},
getOptions().getRetrySettings(),
BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
getOptions().getClock()));
} catch (RetryHelper.RetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
}
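// For permanent external tables the schema belongs on the Table resource itself, so it is
// moved out of the ExternalDataConfiguration before the create/patch call.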
private void handleExternalTableSchema(
final com.google.api.services.bigquery.model.Table tablePb) {
// Set schema on the Table for permanent external table
if (tablePb.getExternalDataConfiguration() != null) {
tablePb.setSchema(tablePb.getExternalDataConfiguration().getSchema());
// clear table schema on ExternalDataConfiguration
tablePb.getExternalDataConfiguration().setSchema(null);
}
}
@Override
public Routine create(RoutineInfo routineInfo, RoutineOption... options) {
final com.google.api.services.bigquery.model.Routine routinePb =
routineInfo
.setProjectId(
Strings.isNullOrEmpty(routineInfo.getRoutineId().getProject())
? getOptions().getProjectId()
: routineInfo.getRoutineId().getProject())
.toPb();
final Map<BigQueryRpc.Option, ?> optionsMap = optionMap(options);
try {
return Routine.fromPb(
this,
runWithRetries(
new Callable<com.google.api.services.bigquery.model.Routine>() {
@Override
public com.google.api.services.bigquery.model.Routine call() {
return bigQueryRpc.create(routinePb, optionsMap);
}
},
getOptions().getRetrySettings(),
BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
getOptions().getClock()));
} catch (RetryHelper.RetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
}
@Override
public Job create(JobInfo jobInfo, JobOption... options) {
Supplier<JobId> idProvider =
new Supplier<JobId>() {
@Override
public JobId get() {
return JobId.of();
}
};
return create(jobInfo, idProvider, options);
}
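// Connections reuse this service's options and RPC transport and are created with the same
// rate-limit retry configuration used for jobs submitted through this class.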
@Override
@BetaApi
public Connection createConnection(@NonNull ConnectionSettings connectionSettings)
throws BigQueryException {
return new ConnectionImpl(connectionSettings, getOptions(), bigQueryRpc, DEFAULT_RETRY_CONFIG);
}
@Override
@BetaApi
public Connection createConnection() throws BigQueryException {
ConnectionSettings defaultConnectionSettings = ConnectionSettings.newBuilder().build();
return new ConnectionImpl(
defaultConnectionSettings, getOptions(), bigQueryRpc, DEFAULT_RETRY_CONFIG);
}
@InternalApi("visible for testing")
Job create(JobInfo jobInfo, Supplier<JobId> idProvider, JobOption... options) {
final boolean idRandom = (jobInfo.getJobId() == null);
final Map<BigQueryRpc.Option, ?> optionsMap = optionMap(options);
BigQueryException createException;
// NOTE(pongad): This double-try structure is admittedly odd.
// translateAndThrow itself throws, and pretends to return an exception only
// so users can pretend to throw.
// This makes it difficult to translate without throwing.
// Fixing this entails some work on BaseServiceException.translate.
// Since that affects a bunch of APIs, we should fix this as a separate change.
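// A single-element array lets the retried callable publish the generated job ID back to this
// method, since anonymous classes can only capture effectively final locals.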
final JobId[] finalJobId = new JobId[1];
try {
try {
return Job.fromPb(
this,
BigQueryRetryHelper.runWithRetries(
new Callable<com.google.api.services.bigquery.model.Job>() {
@Override
public com.google.api.services.bigquery.model.Job call() {
if (idRandom) {
// re-generate a new random job with the same jobInfo when jobId is not
// provided by the user
JobInfo recreatedJobInfo =
jobInfo.toBuilder().setJobId(idProvider.get()).build();
com.google.api.services.bigquery.model.Job newJobPb =
recreatedJobInfo.setProjectId(getOptions().getProjectId()).toPb();
finalJobId[0] = recreatedJobInfo.getJobId();
return bigQueryRpc.create(newJobPb, optionsMap);
} else {
com.google.api.services.bigquery.model.Job jobPb =
jobInfo.setProjectId(getOptions().getProjectId()).toPb();
return bigQueryRpc.create(jobPb, optionsMap);
}
}
},
getOptions().getRetrySettings(),
BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
getOptions().getClock(),
DEFAULT_RETRY_CONFIG));
} catch (BigQueryRetryHelper.BigQueryRetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
} catch (BigQueryException e) {
createException = e;
}
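// For caller-supplied job IDs, an "Already Exists: Job" error may simply mean an earlier
// attempt succeeded; in that case return the existing job, but only if it was created
// within the last 24 hours.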
if (!idRandom) {
if (createException instanceof BigQueryException && createException.getCause() != null) {
/*GoogleJsonResponseException createExceptionCause =
(GoogleJsonResponseException) createException.getCause();*/
Pattern pattern = Pattern.compile(".*Already.*Exists:.*Job.*", Pattern.CASE_INSENSITIVE);
Matcher matcher = pattern.matcher(createException.getCause().getMessage());
if (matcher.find()) {
// If the Job ALREADY EXISTS, retrieve it.
Job job = this.getJob(jobInfo.getJobId(), JobOption.fields(JobField.STATISTICS));
long jobCreationTime = job.getStatistics().getCreationTime();
long jobMinStaleTime = System.currentTimeMillis();
long jobMaxStaleTime =
Instant.ofEpochMilli(jobMinStaleTime).minus(1, ChronoUnit.DAYS).toEpochMilli();
// Only return the job if it has been created in the past 24 hours.
// This is assuming any job older than 24 hours is a valid duplicate JobID
// and not a false positive like b/290419183
if (jobCreationTime >= jobMaxStaleTime && jobCreationTime <= jobMinStaleTime) {
return job;
}
}
}
throw createException;
}
// If create RPC fails, it's still possible that the job has been successfully
// created, and get might work.
// We can only do this if we randomly generated the ID. Otherwise we might
// mistakenly fetch a job created by someone else.
Job job;
try {
job = getJob(finalJobId[0]);
} catch (BigQueryException e) {
throw createException;
}
if (job == null) {
throw createException;
}
return job;
}
@Override
public Dataset getDataset(String datasetId, DatasetOption... options) {
return getDataset(DatasetId.of(datasetId), options);
}
@Override
public Dataset getDataset(final DatasetId datasetId, DatasetOption... options) {
final DatasetId completeDatasetId = datasetId.setProjectId(getOptions().getProjectId());
final Map<BigQueryRpc.Option, ?> optionsMap = optionMap(options);
try {
com.google.api.services.bigquery.model.Dataset answer =
runWithRetries(
new Callable<com.google.api.services.bigquery.model.Dataset>() {
@Override
public com.google.api.services.bigquery.model.Dataset call() {
return bigQueryRpc.getDataset(
completeDatasetId.getProject(), completeDatasetId.getDataset(), optionsMap);
}
},
getOptions().getRetrySettings(),
BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
getOptions().getClock());
if (getOptions().getThrowNotFound() && answer == null) {
throw new BigQueryException(HTTP_NOT_FOUND, "Dataset not found");
}
return answer == null ? null : Dataset.fromPb(this, answer);
} catch (RetryHelper.RetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
}
@Override
public Page<Dataset> listDatasets(DatasetListOption... options) {
return listDatasets(getOptions().getProjectId(), options);
}
@Override
public Page<Dataset> listDatasets(String projectId, DatasetListOption... options) {
return listDatasets(projectId, getOptions(), optionMap(options));
}
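// Static so the serializable DatasetPageFetcher above can fetch subsequent pages without
// holding a reference to a specific service instance.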
private static Page<Dataset> listDatasets(
final String projectId,
final BigQueryOptions serviceOptions,
final Map<BigQueryRpc.Option, ?> optionsMap) {
try {
Tuple<String, Iterable<com.google.api.services.bigquery.model.Dataset>> result =
runWithRetries(
new Callable<Tuple<String, Iterable<com.google.api.services.bigquery.model.Dataset>>>() {
@Override
public Tuple<String, Iterable<com.google.api.services.bigquery.model.Dataset>> call() {
return serviceOptions.getBigQueryRpcV2().listDatasets(projectId, optionsMap);
}
},
serviceOptions.getRetrySettings(),
BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
serviceOptions.getClock());
String cursor = result.x();
return new PageImpl<>(
new DatasetPageFetcher(projectId, serviceOptions, cursor, optionsMap),
cursor,
Iterables.transform(
result.y(),
new Function<com.google.api.services.bigquery.model.Dataset, Dataset>() {
@Override
public Dataset apply(com.google.api.services.bigquery.model.Dataset dataset) {
return Dataset.fromPb(serviceOptions.getService(), dataset);
}
}));
} catch (RetryHelper.RetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
}
@Override
public boolean delete(String datasetId, DatasetDeleteOption... options) {
return delete(DatasetId.of(datasetId), options);
}
@Override
public boolean delete(DatasetId datasetId, DatasetDeleteOption... options) {
final DatasetId completeDatasetId = datasetId.setProjectId(getOptions().getProjectId());
final Map<BigQueryRpc.Option, ?> optionsMap = optionMap(options);
try {
return runWithRetries(
new Callable<Boolean>() {
@Override
public Boolean call() {
return bigQueryRpc.deleteDataset(
completeDatasetId.getProject(), completeDatasetId.getDataset(), optionsMap);
}
},
getOptions().getRetrySettings(),
BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
getOptions().getClock());
} catch (RetryHelper.RetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
}
@Override
public boolean delete(String datasetId, String tableId) {
return delete(TableId.of(datasetId, tableId));
}
@Override
public boolean delete(TableId tableId) {
final TableId completeTableId =
tableId.setProjectId(
Strings.isNullOrEmpty(tableId.getProject())
? getOptions().getProjectId()
: tableId.getProject());
try {
return runWithRetries(
new Callable<Boolean>() {
@Override
public Boolean call() {
return bigQueryRpc.deleteTable(
completeTableId.getProject(),
completeTableId.getDataset(),
completeTableId.getTable());
}
},
getOptions().getRetrySettings(),
BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
getOptions().getClock());
} catch (RetryHelper.RetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
}
@Override
public boolean delete(ModelId modelId) {
final ModelId completeModelId =
modelId.setProjectId(
Strings.isNullOrEmpty(modelId.getProject())
? getOptions().getProjectId()
: modelId.getProject());
try {
return runWithRetries(
new Callable<Boolean>() {
@Override
public Boolean call() {
return bigQueryRpc.deleteModel(
completeModelId.getProject(),
completeModelId.getDataset(),
completeModelId.getModel());
}
},
getOptions().getRetrySettings(),
BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
getOptions().getClock());
} catch (RetryHelper.RetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
}
@Override
public boolean delete(RoutineId routineId) {
final RoutineId completeRoutineId =
routineId.setProjectId(
Strings.isNullOrEmpty(routineId.getProject())
? getOptions().getProjectId()
: routineId.getProject());
try {
return runWithRetries(
new Callable<Boolean>() {
@Override
public Boolean call() {
return bigQueryRpc.deleteRoutine(
completeRoutineId.getProject(),
completeRoutineId.getDataset(),
completeRoutineId.getRoutine());
}
},
getOptions().getRetrySettings(),
BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
getOptions().getClock());
} catch (RetryHelper.RetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
}
@Override
public boolean delete(JobId jobId) {
final JobId completeJobId =
jobId.setProjectId(
Strings.isNullOrEmpty(jobId.getProject())
? getOptions().getProjectId()
: jobId.getProject());
try {
return runWithRetries(
new Callable<Boolean>() {
@Override
public Boolean call() {
return bigQueryRpc.deleteJob(
completeJobId.getProject(), completeJobId.getJob(), completeJobId.getLocation());
}
},
getOptions().getRetrySettings(),
BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
getOptions().getClock());
} catch (RetryHelper.RetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
}
@Override
public Dataset update(DatasetInfo datasetInfo, DatasetOption... options) {
final com.google.api.services.bigquery.model.Dataset datasetPb =
datasetInfo.setProjectId(getOptions().getProjectId()).toPb();
final Map<BigQueryRpc.Option, ?> optionsMap = optionMap(options);
try {
return Dataset.fromPb(
this,
runWithRetries(
new Callable<com.google.api.services.bigquery.model.Dataset>() {
@Override
public com.google.api.services.bigquery.model.Dataset call() {
return bigQueryRpc.patch(datasetPb, optionsMap);
}
},
getOptions().getRetrySettings(),
BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
getOptions().getClock()));
} catch (RetryHelper.RetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
}
@Override
public Table update(TableInfo tableInfo, TableOption... options) {
final com.google.api.services.bigquery.model.Table tablePb =
tableInfo
.setProjectId(
Strings.isNullOrEmpty(tableInfo.getTableId().getProject())
? getOptions().getProjectId()
: tableInfo.getTableId().getProject())
.toPb();
handleExternalTableSchema(tablePb);
final Map<BigQueryRpc.Option, ?> optionsMap = optionMap(options);
try {
return Table.fromPb(
this,
runWithRetries(
new Callable<com.google.api.services.bigquery.model.Table>() {
@Override
public com.google.api.services.bigquery.model.Table call() {
return bigQueryRpc.patch(tablePb, optionsMap);
}
},
getOptions().getRetrySettings(),
BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
getOptions().getClock()));
} catch (RetryHelper.RetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
}
@Override
public Model update(ModelInfo modelInfo, ModelOption... options) {
final com.google.api.services.bigquery.model.Model modelPb =
modelInfo
.setProjectId(
Strings.isNullOrEmpty(modelInfo.getModelId().getProject())
? getOptions().getProjectId()
: modelInfo.getModelId().getProject())
.toPb();
final Map<BigQueryRpc.Option, ?> optionsMap = optionMap(options);
try {
return Model.fromPb(
this,
runWithRetries(
new Callable<com.google.api.services.bigquery.model.Model>() {
@Override
public com.google.api.services.bigquery.model.Model call() {
return bigQueryRpc.patch(modelPb, optionsMap);
}
},
getOptions().getRetrySettings(),
BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
getOptions().getClock()));
} catch (RetryHelper.RetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
}
@Override
public Routine update(RoutineInfo routineInfo, RoutineOption... options) {
final com.google.api.services.bigquery.model.Routine routinePb =
routineInfo
.setProjectId(
Strings.isNullOrEmpty(routineInfo.getRoutineId().getProject())
? getOptions().getProjectId()
: routineInfo.getRoutineId().getProject())
.toPb();
final Map<BigQueryRpc.Option, ?> optionsMap = optionMap(options);
try {
return Routine.fromPb(
this,
runWithRetries(
new Callable<com.google.api.services.bigquery.model.Routine>() {
@Override
public com.google.api.services.bigquery.model.Routine call() {
return bigQueryRpc.update(routinePb, optionsMap);
}
},
getOptions().getRetrySettings(),
BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
getOptions().getClock()));
} catch (RetryHelper.RetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
}
@Override
public Table getTable(final String datasetId, final String tableId, TableOption... options) {
return getTable(TableId.of(datasetId, tableId), options);
}
@Override
public Table getTable(TableId tableId, TableOption... options) {
// More context about why this:
// https://github.com/googleapis/google-cloud-java/issues/3808
final TableId completeTableId =
tableId.setProjectId(
Strings.isNullOrEmpty(tableId.getProject())
? getOptions().getProjectId()
: tableId.getProject());
final Map<BigQueryRpc.Option, ?> optionsMap = optionMap(options);
try {
com.google.api.services.bigquery.model.Table answer =
runWithRetries(
new Callable<com.google.api.services.bigquery.model.Table>() {
@Override
public com.google.api.services.bigquery.model.Table call() {
return bigQueryRpc.getTable(
completeTableId.getProject(),
completeTableId.getDataset(),
completeTableId.getTable(),
optionsMap);
}
},
getOptions().getRetrySettings(),
BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
getOptions().getClock());
if (getOptions().getThrowNotFound() && answer == null) {
throw new BigQueryException(HTTP_NOT_FOUND, "Table not found");
}
return answer == null ? null : Table.fromPb(this, answer);
} catch (RetryHelper.RetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
}
@Override
public Model getModel(String datasetId, String modelId, ModelOption... options) {
return getModel(ModelId.of(datasetId, modelId), options);
}
@Override
public Model getModel(ModelId modelId, ModelOption... options) {
final ModelId completeModelId =
modelId.setProjectId(
Strings.isNullOrEmpty(modelId.getProject())
? getOptions().getProjectId()
: modelId.getProject());
final Map<BigQueryRpc.Option, ?> optionsMap = optionMap(options);
try {
com.google.api.services.bigquery.model.Model answer =
runWithRetries(
new Callable<com.google.api.services.bigquery.model.Model>() {
@Override
public com.google.api.services.bigquery.model.Model call() {
return bigQueryRpc.getModel(
completeModelId.getProject(),
completeModelId.getDataset(),
completeModelId.getModel(),
optionsMap);
}
},
getOptions().getRetrySettings(),
BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
getOptions().getClock());
if (getOptions().getThrowNotFound() && answer == null) {
throw new BigQueryException(HTTP_NOT_FOUND, "Model not found");
}
return answer == null ? null : Model.fromPb(this, answer);
} catch (RetryHelper.RetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
}
@Override
public Routine getRoutine(String datasetId, String routineId, RoutineOption... options) {
return getRoutine(RoutineId.of(datasetId, routineId), options);
}
@Override
public Routine getRoutine(RoutineId routineId, RoutineOption... options) {
final RoutineId completeRoutineId =
routineId.setProjectId(
Strings.isNullOrEmpty(routineId.getProject())
? getOptions().getProjectId()
: routineId.getProject());
final Map<BigQueryRpc.Option, ?> optionsMap = optionMap(options);
try {
com.google.api.services.bigquery.model.Routine answer =
runWithRetries(
new Callable<com.google.api.services.bigquery.model.Routine>() {
@Override
public com.google.api.services.bigquery.model.Routine call() {
return bigQueryRpc.getRoutine(
completeRoutineId.getProject(),
completeRoutineId.getDataset(),
completeRoutineId.getRoutine(),
optionsMap);
}
},
getOptions().getRetrySettings(),
BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
getOptions().getClock());
if (getOptions().getThrowNotFound() && answer == null) {
throw new BigQueryException(HTTP_NOT_FOUND, "Routine not found");
}
return answer == null ? null : Routine.fromPb(this, answer);
} catch (RetryHelper.RetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
}
@Override
public Page