Commit

Merge pull request data-integrations#1309 from data-integrations/PLUGIN-1687

[PLUGIN-1687] Override AvroBigQueryInputFormat with additional scopes for external tables
itsankit-google authored Sep 25, 2023
2 parents d5c7025 + 42e4128 commit 9517acc
Showing 2 changed files with 63 additions and 2 deletions.
AvroBigQueryInputFormatWithScopes.java (new file)
@@ -0,0 +1,46 @@
/*
* Copyright © 2023 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/

package io.cdap.plugin.gcp.bigquery.source;

import com.google.cloud.hadoop.io.bigquery.AvroBigQueryInputFormat;
import com.google.cloud.hadoop.io.bigquery.BigQueryFactory;
import com.google.cloud.hadoop.io.bigquery.BigQueryHelper;
import io.cdap.plugin.gcp.common.GCPUtils;
import org.apache.hadoop.conf.Configuration;

import java.io.IOException;
import java.security.GeneralSecurityException;

/**
* Override for {@link AvroBigQueryInputFormat} with additional scopes for external tables.
*/
public class AvroBigQueryInputFormatWithScopes extends AvroBigQueryInputFormat {

/**
* Override to support additional scopes, useful when exporting from external tables.
*
* @param config Hadoop config
* @return BigQuery Helper instance
* @throws IOException on IO Error.
* @throws GeneralSecurityException on security exception.
*/
@Override
protected BigQueryHelper getBigQueryHelper(Configuration config) throws GeneralSecurityException, IOException {
BigQueryFactory factory = new BigQueryFactoryWithScopes(GCPUtils.BIGQUERY_SCOPES);
return factory.getBigQueryHelper(config);
}
}
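
Context for the added class: external tables, typically those backed by Google Drive sources such as Sheets, need the export credential to carry additional OAuth scopes beyond the plain BigQuery scope, which is what the override above supplies via GCPUtils.BIGQUERY_SCOPES. Below is a minimal sketch of the kind of scope list such a constant would hold; the real values live in io.cdap.plugin.gcp.common.GCPUtils, and the literals here are illustrative assumptions, not the plugin's actual definitions.

import java.util.Arrays;
import java.util.List;

public class ExampleBigQueryScopes {
  // Illustrative stand-in for GCPUtils.BIGQUERY_SCOPES; the exact entries are assumptions.
  static final List<String> BIGQUERY_SCOPES = Arrays.asList(
      // Standard BigQuery access.
      "https://www.googleapis.com/auth/bigquery",
      // Allows the export job to read Drive-backed external tables (e.g. Sheets).
      "https://www.googleapis.com/auth/drive");
}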
PartitionedBigQueryInputFormat.java
@@ -1,3 +1,19 @@
/*
* Copyright © 2023 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/

package io.cdap.plugin.gcp.bigquery.source;

import com.google.api.services.bigquery.model.Job;
@@ -10,7 +26,6 @@
import com.google.cloud.bigquery.TableDefinition.Type;
import com.google.cloud.bigquery.TimePartitioning;
import com.google.cloud.hadoop.io.bigquery.AbstractBigQueryInputFormat;
- import com.google.cloud.hadoop.io.bigquery.AvroBigQueryInputFormat;
import com.google.cloud.hadoop.io.bigquery.AvroRecordReader;
import com.google.cloud.hadoop.io.bigquery.BigQueryConfiguration;
import com.google.cloud.hadoop.io.bigquery.BigQueryFactory;
@@ -51,7 +66,7 @@
public class PartitionedBigQueryInputFormat extends AbstractBigQueryInputFormat<LongWritable, GenericData.Record> {

  private InputFormat<LongWritable, GenericData.Record> delegateInputFormat =
-     new AvroBigQueryInputFormat();
+     new AvroBigQueryInputFormatWithScopes();

@Override
public ExportFileFormat getExportFileFormat() {
(The rest of the file is unchanged.)
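
For completeness, a hedged sketch (not part of this PR) of why the one-line delegate swap needs no other changes in PartitionedBigQueryInputFormat: the new class is a subtype of AvroBigQueryInputFormat, so the existing field type and every call through the delegate keep working, and only the credential scoping differs at runtime.

import com.google.cloud.hadoop.io.bigquery.AvroBigQueryInputFormat;
import io.cdap.plugin.gcp.bigquery.source.AvroBigQueryInputFormatWithScopes;

public class DelegateSwapSketch {
  public static void main(String[] args) {
    // The subclass slots in wherever the original delegate type is expected.
    AvroBigQueryInputFormat delegate = new AvroBigQueryInputFormatWithScopes();
    System.out.println(delegate.getClass().getSimpleName());
  }
}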
