diff --git a/src/main/java/io/cdap/plugin/gcp/bigquery/source/AvroBigQueryInputFormatWithScopes.java b/src/main/java/io/cdap/plugin/gcp/bigquery/source/AvroBigQueryInputFormatWithScopes.java
new file mode 100644
index 0000000000..0a0e112741
--- /dev/null
+++ b/src/main/java/io/cdap/plugin/gcp/bigquery/source/AvroBigQueryInputFormatWithScopes.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package io.cdap.plugin.gcp.bigquery.source;
+
+import com.google.cloud.hadoop.io.bigquery.AvroBigQueryInputFormat;
+import com.google.cloud.hadoop.io.bigquery.BigQueryFactory;
+import com.google.cloud.hadoop.io.bigquery.BigQueryHelper;
+import io.cdap.plugin.gcp.common.GCPUtils;
+import org.apache.hadoop.conf.Configuration;
+
+import java.io.IOException;
+import java.security.GeneralSecurityException;
+
+/**
+ * Override for {@link AvroBigQueryInputFormat} with additional scopes for external tables.
+ */
+public class AvroBigQueryInputFormatWithScopes extends AvroBigQueryInputFormat {
+
+  /**
+   * Override to support additional scopes, useful when exporting from external tables.
+   *
+   * @param config Hadoop config
+   * @return BigQuery Helper instance
+   * @throws IOException on IO error.
+   * @throws GeneralSecurityException on security exception.
+   */
+  @Override
+  protected BigQueryHelper getBigQueryHelper(Configuration config) throws GeneralSecurityException, IOException {
+    BigQueryFactory factory = new BigQueryFactoryWithScopes(GCPUtils.BIGQUERY_SCOPES);
+    return factory.getBigQueryHelper(config);
+  }
+}
diff --git a/src/main/java/io/cdap/plugin/gcp/bigquery/source/PartitionedBigQueryInputFormat.java b/src/main/java/io/cdap/plugin/gcp/bigquery/source/PartitionedBigQueryInputFormat.java
index c70e1142f9..f9a938aa4f 100644
--- a/src/main/java/io/cdap/plugin/gcp/bigquery/source/PartitionedBigQueryInputFormat.java
+++ b/src/main/java/io/cdap/plugin/gcp/bigquery/source/PartitionedBigQueryInputFormat.java
@@ -1,3 +1,19 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
 package io.cdap.plugin.gcp.bigquery.source;
 
 import com.google.api.services.bigquery.model.Job;
@@ -10,7 +26,6 @@
 import com.google.cloud.bigquery.TableDefinition.Type;
 import com.google.cloud.bigquery.TimePartitioning;
 import com.google.cloud.hadoop.io.bigquery.AbstractBigQueryInputFormat;
-import com.google.cloud.hadoop.io.bigquery.AvroBigQueryInputFormat;
 import com.google.cloud.hadoop.io.bigquery.AvroRecordReader;
 import com.google.cloud.hadoop.io.bigquery.BigQueryConfiguration;
 import com.google.cloud.hadoop.io.bigquery.BigQueryFactory;
@@ -51,7 +66,7 @@
 public class PartitionedBigQueryInputFormat extends AbstractBigQueryInputFormat {
 
   private InputFormat delegateInputFormat =
-      new AvroBigQueryInputFormat();
+      new AvroBigQueryInputFormatWithScopes();
 
   @Override
   public ExportFileFormat getExportFileFormat() {
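Not part of the patch above: a minimal sketch of a regression test that pins down the delegate swap, should one be wanted alongside this change. It assumes JUnit 4 is on the project's test classpath; the test class and method names are illustrative, and the reflective lookup relies on the delegateInputFormat field keeping the name shown in the diff.

package io.cdap.plugin.gcp.bigquery.source;

import org.junit.Assert;
import org.junit.Test;

import java.lang.reflect.Field;

public class PartitionedBigQueryInputFormatDelegateTest {

  @Test
  public void delegateUsesScopeAwareAvroInputFormat() throws Exception {
    PartitionedBigQueryInputFormat inputFormat = new PartitionedBigQueryInputFormat();

    // delegateInputFormat is private, so read it reflectively rather than widening its visibility.
    Field delegateField = PartitionedBigQueryInputFormat.class.getDeclaredField("delegateInputFormat");
    delegateField.setAccessible(true);
    Object delegate = delegateField.get(inputFormat);

    // The source should now delegate to the scope-aware Avro input format.
    Assert.assertTrue(delegate instanceof AvroBigQueryInputFormatWithScopes);
  }
}

Reading the private field reflectively keeps the production class unchanged; a package-private getter would also work, but it would widen the API solely for testing.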