Merge pull request #1477 from data-integrations/CDAP-20931
[CDAP-20931] Add errorCodeType, errorCode & supportedDocURL in exception
itsankit-google authored Dec 16, 2024
2 parents 228ece3 + 02b04b4 commit c0fd067
Showing 5 changed files with 23 additions and 11 deletions.
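
The enriched throw sites in the hunks below all follow the same pattern: map the HTTP status code to an ErrorType and corrective action via ErrorUtils.getActionErrorByStatusCode, append a "For more details, see <doc URL>" pointer to the error reason, and pass ErrorCodeType.HTTP, the stringified status code, and the supported-documentation URL to ErrorUtils.getProgramFailureException. A minimal consolidated sketch of that pattern, assuming the overload used in this commit; the class and method names here are hypothetical, and e.getMessage() stands in for the callers' error-message suppliers:

import com.google.cloud.bigquery.BigQueryException;
import io.cdap.cdap.api.exception.ErrorCategory;
import io.cdap.cdap.api.exception.ErrorCodeType;
import io.cdap.cdap.api.exception.ErrorUtils;
import io.cdap.plugin.gcp.common.GCPUtils;

final class EnrichedFailureSketch {

  // Hypothetical helper mirroring the call pattern introduced in this commit.
  static void rethrowAsProgramFailure(BigQueryException e) {
    // Derive the error type and a corrective action from the HTTP status code.
    ErrorUtils.ActionErrorPair pair = ErrorUtils.getActionErrorByStatusCode(e.getCode());
    // The error reason now ends with a pointer to the supported documentation page.
    String errorReason = String.format("%s %s %s For more details, see %s", e.getCode(),
      e.getMessage(), pair.getCorrectiveAction(), GCPUtils.BQ_SUPPORTED_DOC_URL);
    // New trailing arguments: errorCodeType, errorCode, supportedDocumentationUrl.
    throw ErrorUtils.getProgramFailureException(
      new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN), errorReason, e.getMessage(),
      pair.getErrorType(), true, ErrorCodeType.HTTP, String.valueOf(e.getCode()),
      GCPUtils.BQ_SUPPORTED_DOC_URL, e);
  }

  private EnrichedFailureSketch() { }
}

The GCS-facing call sites are identical except that they use GCPUtils.GCS_SUPPORTED_DOC_URL and, in getOrCreateBucket, a fixed ErrorType.USER instead of the status-derived type.
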
3 changes: 2 additions & 1 deletion src/main/java/io/cdap/plugin/gcp/bigquery/common/BigQueryErrorDetailsProvider.java
@@ -17,6 +17,7 @@
 package io.cdap.plugin.gcp.bigquery.common;
 
 import io.cdap.plugin.gcp.common.GCPErrorDetailsProvider;
+import io.cdap.plugin.gcp.common.GCPUtils;
 
 /**
  * A custom ErrorDetailsProvider for BigQuery plugins.
@@ -25,6 +26,6 @@ public class BigQueryErrorDetailsProvider extends GCPErrorDetailsProvider {
 
   @Override
   protected String getExternalDocumentationLink() {
-    return "https://cloud.google.com/bigquery/docs/error-messages";
+    return GCPUtils.BQ_SUPPORTED_DOC_URL;
  }
 }

@@ -36,6 +36,7 @@
 import com.google.gson.Gson;
 import io.cdap.cdap.api.data.schema.Schema;
 import io.cdap.cdap.api.exception.ErrorCategory;
+import io.cdap.cdap.api.exception.ErrorCodeType;
 import io.cdap.cdap.api.exception.ErrorType;
 import io.cdap.cdap.api.exception.ErrorUtils;
 import io.cdap.cdap.etl.api.FailureCollector;
@@ -196,10 +197,12 @@ private static void createDataset(BigQuery bigQuery, DatasetId dataset, @Nullabl
         // This most likely means multiple stages in the same pipeline are trying to create the same dataset.
         // Ignore this and move on, since all that matters is that the dataset exists.
         ErrorUtils.ActionErrorPair pair = ErrorUtils.getActionErrorByStatusCode(e.getCode());
-        String errorReason = String.format("%s %s %s", e.getCode(), e.getMessage(), pair.getCorrectiveAction());
+        String errorReason = String.format("%s %s %s For more details, see %s", e.getCode(),
+          e.getMessage(), pair.getCorrectiveAction(), GCPUtils.BQ_SUPPORTED_DOC_URL);
         throw ErrorUtils.getProgramFailureException(
           new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN), errorReason, errorMessage.get(),
-          pair.getErrorType(), true, e);
+          pair.getErrorType(), true, ErrorCodeType.HTTP, String.valueOf(e.getCode()),
+          GCPUtils.BQ_SUPPORTED_DOC_URL, e);
       }
     }
   }
@@ -244,10 +247,12 @@ private static void createBucket(Storage storage, String bucket, @Nullable Strin
         // This most likely means multiple stages in the same pipeline are trying to create the same dataset.
         // Ignore this and move on, since all that matters is that the dataset exists.
         ErrorUtils.ActionErrorPair pair = ErrorUtils.getActionErrorByStatusCode(e.getCode());
-        String errorReason = String.format("%s %s %s", e.getCode(), e.getMessage(), pair.getCorrectiveAction());
+        String errorReason = String.format("%s %s %s For more details, see %s", e.getCode(),
+          e.getMessage(), pair.getCorrectiveAction(), GCPUtils.GCS_SUPPORTED_DOC_URL);
         throw ErrorUtils.getProgramFailureException(
           new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN), errorReason, errorMessage.get(),
-          pair.getErrorType(), true, e);
+          pair.getErrorType(), true, ErrorCodeType.HTTP, String.valueOf(e.getCode()),
+          GCPUtils.GCS_SUPPORTED_DOC_URL, e);
       }
     }
   }

@@ -26,6 +26,7 @@
 import com.google.cloud.storage.Storage;
 import com.google.cloud.storage.StorageException;
 import io.cdap.cdap.api.exception.ErrorCategory;
+import io.cdap.cdap.api.exception.ErrorCodeType;
 import io.cdap.cdap.api.exception.ErrorType;
 import io.cdap.cdap.api.exception.ErrorUtils;
 import io.cdap.plugin.gcp.bigquery.connector.BigQueryConnectorConfig;
@@ -96,12 +97,14 @@ public static String getOrCreateBucket(Configuration configuration,
         // Ignore this and move on, since all that matters is that the bucket exists.
         return bucket;
       }
-      String errorMessage = String.format("Unable to create Cloud Storage bucket '%s' in the same " +
-        "location ('%s') as BigQuery dataset '%s'. " + "Please use a bucket " +
-        "that is in the same location as the dataset.",
-        bucket, dataset.getLocation(), dataset.getDatasetId().getDataset());
+      String errorMessage = String.format("Unable to create Cloud Storage bucket '%s' in the same "
+        + "location ('%s') as BigQuery dataset '%s'. " + "Please use a bucket "
+        + "that is in the same location as the dataset. For more details, see %s",
+        bucket, dataset.getLocation(), dataset.getDatasetId().getDataset(),
+        GCPUtils.GCS_SUPPORTED_DOC_URL);
       throw ErrorUtils.getProgramFailureException(new ErrorCategory(ErrorCategory.ErrorCategoryEnum.PLUGIN),
-        errorMessage, e.getMessage(), ErrorType.USER, true, e);
+        errorMessage, e.getMessage(), ErrorType.USER, true, ErrorCodeType.HTTP,
+        String.valueOf(e.getCode()), GCPUtils.GCS_SUPPORTED_DOC_URL, e);
     }
   }
 

2 changes: 2 additions & 0 deletions src/main/java/io/cdap/plugin/gcp/common/GCPUtils.java
@@ -79,6 +79,8 @@ public class GCPUtils {
     "https://www.googleapis.com/auth/bigquery");
   public static final String FQN_RESERVED_CHARACTERS_PATTERN = ".*[.:` \t\n].*";
   public static final int MILLISECONDS_MULTIPLIER = 1000;
+  public static final String GCS_SUPPORTED_DOC_URL = "https://cloud.google.com/storage/docs/json_api/v1/status-codes";
+  public static final String BQ_SUPPORTED_DOC_URL = "https://cloud.google.com/bigquery/docs/error-messages";
 
   /**
    * Load a service account from the local file system.
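
These two constants give the documentation URLs a single definition: the error reasons and supportedDocumentationUrl arguments in the hunks above reference them, and the two ErrorDetailsProvider subclasses changed in this commit return them from getExternalDocumentationLink() instead of repeating the literals. A minimal sketch of that provider pattern, with a hypothetical class name (GCPErrorDetailsProvider and GCPUtils are the repository's own classes):

import io.cdap.plugin.gcp.common.GCPErrorDetailsProvider;
import io.cdap.plugin.gcp.common.GCPUtils;

// Hypothetical provider mirroring the BigQuery and GCS providers changed in this commit.
public class ExampleErrorDetailsProvider extends GCPErrorDetailsProvider {

  @Override
  protected String getExternalDocumentationLink() {
    // The shared constant keeps this link consistent with the URL attached to thrown exceptions.
    return GCPUtils.GCS_SUPPORTED_DOC_URL;
  }
}
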
3 changes: 2 additions & 1 deletion src/main/java/io/cdap/plugin/gcp/gcs/GCSErrorDetailsProvider.java
@@ -17,6 +17,7 @@
 package io.cdap.plugin.gcp.gcs;
 
 import io.cdap.plugin.gcp.common.GCPErrorDetailsProvider;
+import io.cdap.plugin.gcp.common.GCPUtils;
 
 /**
  * A custom ErrorDetailsProvider for GCS plugins.
@@ -25,6 +26,6 @@ public class GCSErrorDetailsProvider extends GCPErrorDetailsProvider {
 
   @Override
   protected String getExternalDocumentationLink() {
-    return "https://cloud.google.com/storage/docs/json_api/v1/status-codes";
+    return GCPUtils.GCS_SUPPORTED_DOC_URL;
  }
 }
