diff --git a/yb-voyager/cmd/analyzeSchema.go b/yb-voyager/cmd/analyzeSchema.go index d0dc86e9a..a68a8877b 100644 --- a/yb-voyager/cmd/analyzeSchema.go +++ b/yb-voyager/cmd/analyzeSchema.go @@ -623,7 +623,7 @@ func convertIssueInstanceToAnalyzeIssue(issueInstance queryissue.QueryIssue, fil issueType := UNSUPPORTED_FEATURES switch true { case isPlPgSQLIssue: - issueType = UNSUPPORTED_PLPGSQL_OBEJCTS + issueType = UNSUPPORTED_PLPGSQL_OBJECTS case slices.ContainsFunc(MigrationCaveatsIssues, func(i string) bool { //Adding the MIGRATION_CAVEATS issueType of the utils.Issue for these issueInstances in MigrationCaveatsIssues return strings.Contains(issueInstance.TypeName, i) diff --git a/yb-voyager/cmd/assessMigrationCommand.go b/yb-voyager/cmd/assessMigrationCommand.go index 0493fb3f5..7e6e449b0 100644 --- a/yb-voyager/cmd/assessMigrationCommand.go +++ b/yb-voyager/cmd/assessMigrationCommand.go @@ -31,6 +31,7 @@ import ( "syscall" "text/template" + "github.com/davecgh/go-spew/spew" "github.com/fatih/color" "github.com/samber/lo" log "github.com/sirupsen/logrus" @@ -38,6 +39,7 @@ import ( "golang.org/x/exp/slices" "github.com/yugabyte/yb-voyager/yb-voyager/src/callhome" + "github.com/yugabyte/yb-voyager/yb-voyager/src/constants" "github.com/yugabyte/yb-voyager/yb-voyager/src/cp" "github.com/yugabyte/yb-voyager/yb-voyager/src/metadb" "github.com/yugabyte/yb-voyager/yb-voyager/src/migassessment" @@ -411,6 +413,15 @@ func assessMigration() (err error) { return fmt.Errorf("failed to generate assessment report: %w", err) } + assessmentIssues, err := fetchAllAssessmentIssues() + if err != nil { + log.Errorf("failed to fetch the assessment issues for migration complexity: %v", err) + } + utils.PrintAndLog("number of assessment issues detected: %d\n", len(assessmentIssues)) + for i, issue := range assessmentIssues { + fmt.Printf("AssessmentIssue[%d] = %s\n\n", i, spew.Sdump(issue)) + } + utils.PrintAndLog("Migration assessment completed successfully.") completedEvent := 
createMigrationAssessmentCompletedEvent() controlPlane.MigrationAssessmentCompleted(completedEvent) @@ -526,8 +537,8 @@ func flattenAssessmentReportToAssessmentIssues(ar AssessmentReport) []Assessment } for _, unsupportedDataType := range ar.UnsupportedDataTypes { issues = append(issues, AssessmentIssueYugabyteD{ - Type: DATATYPE, - TypeDescription: DATATYPE_ISSUE_TYPE_DESCRIPTION, + Type: constants.DATATYPE, + TypeDescription: DATATYPE_CATEGORY_DESCRIPTION, Subtype: unsupportedDataType.DataType, ObjectName: fmt.Sprintf("%s.%s.%s", unsupportedDataType.SchemaName, unsupportedDataType.TableName, unsupportedDataType.ColumnName), SqlStatement: "", @@ -538,8 +549,8 @@ func flattenAssessmentReportToAssessmentIssues(ar AssessmentReport) []Assessment for _, unsupportedFeature := range ar.UnsupportedFeatures { for _, object := range unsupportedFeature.Objects { issues = append(issues, AssessmentIssueYugabyteD{ - Type: FEATURE, - TypeDescription: FEATURE_ISSUE_TYPE_DESCRIPTION, + Type: constants.FEATURE, + TypeDescription: FEATURE_CATEGORY_DESCRIPTION, Subtype: unsupportedFeature.FeatureName, SubtypeDescription: unsupportedFeature.FeatureDescription, // TODO: test payload once we add desc for unsupported features ObjectName: object.ObjectName, @@ -553,8 +564,8 @@ func flattenAssessmentReportToAssessmentIssues(ar AssessmentReport) []Assessment for _, migrationCaveat := range ar.MigrationCaveats { for _, object := range migrationCaveat.Objects { issues = append(issues, AssessmentIssueYugabyteD{ - Type: MIGRATION_CAVEATS, - TypeDescription: MIGRATION_CAVEATS_TYPE_DESCRIPTION, + Type: constants.MIGRATION_CAVEATS, + TypeDescription: MIGRATION_CAVEATS_CATEGORY_DESCRIPTION, Subtype: migrationCaveat.FeatureName, SubtypeDescription: migrationCaveat.FeatureDescription, ObjectName: object.ObjectName, @@ -567,8 +578,8 @@ func flattenAssessmentReportToAssessmentIssues(ar AssessmentReport) []Assessment for _, uqc := range ar.UnsupportedQueryConstructs { issues = append(issues, 
AssessmentIssueYugabyteD{ - Type: QUERY_CONSTRUCT, - TypeDescription: UNSUPPORTED_QUERY_CONSTRUTS_DESCRIPTION, + Type: constants.QUERY_CONSTRUCT, + TypeDescription: UNSUPPORTED_QUERY_CONSTRUCTS_CATEGORY_DESCRIPTION, Subtype: uqc.ConstructTypeName, SqlStatement: uqc.Query, DocsLink: uqc.DocsLink, @@ -579,8 +590,8 @@ func flattenAssessmentReportToAssessmentIssues(ar AssessmentReport) []Assessment for _, plpgsqlObjects := range ar.UnsupportedPlPgSqlObjects { for _, object := range plpgsqlObjects.Objects { issues = append(issues, AssessmentIssueYugabyteD{ - Type: PLPGSQL_OBJECT, - TypeDescription: UNSUPPPORTED_PLPGSQL_OBJECT_DESCRIPTION, + Type: constants.PLPGSQL_OBJECT, + TypeDescription: UNSUPPPORTED_PLPGSQL_OBJECT_CATEGORY_DESCRIPTION, Subtype: plpgsqlObjects.FeatureName, SubtypeDescription: plpgsqlObjects.FeatureDescription, ObjectName: object.ObjectName, @@ -876,7 +887,9 @@ func generateAssessmentReport() (err error) { return fmt.Errorf("failed to fetch columns with unsupported data types: %w", err) } assessmentReport.UnsupportedDataTypes = unsupportedDataTypes - assessmentReport.UnsupportedDataTypesDesc = DATATYPE_ISSUE_TYPE_DESCRIPTION + assessmentReport.UnsupportedDataTypesDesc = DATATYPE_CATEGORY_DESCRIPTION + + addMigrationCaveatsToAssessmentReport(unsupportedDataTypesForLiveMigration, unsupportedDataTypesForLiveMigrationWithFForFB) assessmentReport.Sizing = migassessment.SizingReport assessmentReport.TableIndexStats, err = assessmentDB.FetchAllStats() @@ -885,7 +898,6 @@ func generateAssessmentReport() (err error) { } addNotesToAssessmentReport() - addMigrationCaveatsToAssessmentReport(unsupportedDataTypesForLiveMigration, unsupportedDataTypesForLiveMigrationWithFForFB) postProcessingOfAssessmentReport() assessmentReportDir := filepath.Join(exportDir, "assessment", "reports") @@ -903,20 +915,16 @@ func generateAssessmentReport() (err error) { func getAssessmentReportContentFromAnalyzeSchema() error { /* - Here we are generating analyze schema report which 
converts issue instance to analyze schema issue - Then in assessment codepath we extract the required information from analyze schema issue which could have been done directly from issue instance(TODO) + Here we are generating analyze schema report which converts issue instance to analyze schema issue + Then in assessment codepath we extract the required information from analyze schema issue which could have been done directly from issue instance(TODO) - But current Limitation is analyze schema currently uses regexp etc to detect some issues(not using parser). + But current Limitation is analyze schema currently uses regexp etc to detect some issues(not using parser). */ schemaAnalysisReport := analyzeSchemaInternal(&source, true) assessmentReport.MigrationComplexity = schemaAnalysisReport.MigrationComplexity assessmentReport.SchemaSummary = schemaAnalysisReport.SchemaSummary - assessmentReport.SchemaSummary.Description = SCHEMA_SUMMARY_DESCRIPTION - if source.DBType == ORACLE { - assessmentReport.SchemaSummary.Description = SCHEMA_SUMMARY_DESCRIPTION_ORACLE - } + assessmentReport.SchemaSummary.Description = lo.Ternary(source.DBType == ORACLE, SCHEMA_SUMMARY_DESCRIPTION_ORACLE, SCHEMA_SUMMARY_DESCRIPTION) - // fetching unsupportedFeaturing with the help of Issues report in SchemaReport var unsupportedFeatures []UnsupportedFeature var err error switch source.DBType { @@ -928,13 +936,14 @@ func getAssessmentReportContentFromAnalyzeSchema() error { panic(fmt.Sprintf("unsupported source db type %q", source.DBType)) } if err != nil { - return fmt.Errorf("failed to fetch %s unsupported features: %w", source.DBType, err) + return fmt.Errorf("failed to fetch '%s' unsupported features: %w", source.DBType, err) } assessmentReport.UnsupportedFeatures = append(assessmentReport.UnsupportedFeatures, unsupportedFeatures...) 
- assessmentReport.UnsupportedFeaturesDesc = FEATURE_ISSUE_TYPE_DESCRIPTION + assessmentReport.UnsupportedFeaturesDesc = FEATURE_CATEGORY_DESCRIPTION + + // Ques: Do we still need this and REPORT_UNSUPPORTED_QUERY_CONSTRUCTS env var if utils.GetEnvAsBool("REPORT_UNSUPPORTED_PLPGSQL_OBJECTS", true) { - unsupportedPlpgSqlObjects := fetchUnsupportedPlPgSQLObjects(schemaAnalysisReport) - assessmentReport.UnsupportedPlPgSqlObjects = unsupportedPlpgSqlObjects + assessmentReport.UnsupportedPlPgSqlObjects = fetchUnsupportedPlPgSQLObjects(schemaAnalysisReport) } return nil } @@ -968,28 +977,30 @@ func getUnsupportedFeaturesFromSchemaAnalysisReport(featureName string, issueRea var minVersionsFixedIn map[string]*ybversion.YBVersion var minVersionsFixedInSet bool - for _, issue := range schemaAnalysisReport.Issues { - if !slices.Contains([]string{UNSUPPORTED_FEATURES, MIGRATION_CAVEATS}, issue.IssueType) { + for _, analyzeIssue := range schemaAnalysisReport.Issues { + if !slices.Contains([]string{UNSUPPORTED_FEATURES, MIGRATION_CAVEATS}, analyzeIssue.IssueType) { continue } - issueMatched := lo.Ternary[bool](issueType != "", issueType == issue.Type, strings.Contains(issue.Reason, issueReason)) + issueMatched := lo.Ternary[bool](issueType != "", issueType == analyzeIssue.Type, strings.Contains(analyzeIssue.Reason, issueReason)) if issueMatched { - objectInfo := ObjectInfo{ - ObjectName: issue.ObjectName, - SqlStatement: issue.SqlStatement, - } - link = issue.DocsLink - objects = append(objects, objectInfo) if !minVersionsFixedInSet { - minVersionsFixedIn = issue.MinimumVersionsFixedIn + minVersionsFixedIn = analyzeIssue.MinimumVersionsFixedIn minVersionsFixedInSet = true } - if !areMinVersionsFixedInEqual(minVersionsFixedIn, issue.MinimumVersionsFixedIn) { - utils.ErrExit("Issues belonging to UnsupportedFeature %s have different minimum versions fixed in: %v, %v", featureName, minVersionsFixedIn, issue.MinimumVersionsFixedIn) + if !areMinVersionsFixedInEqual(minVersionsFixedIn, 
analyzeIssue.MinimumVersionsFixedIn) { + utils.ErrExit("Issues belonging to UnsupportedFeature %s have different minimum versions fixed in: %v, %v", featureName, minVersionsFixedIn, analyzeIssue.MinimumVersionsFixedIn) } + + objectInfo := ObjectInfo{ + ObjectName: analyzeIssue.ObjectName, + SqlStatement: analyzeIssue.SqlStatement, + } + link = analyzeIssue.DocsLink + objects = append(objects, objectInfo) } } + return UnsupportedFeature{featureName, objects, displayDDLInHTML, link, description, minVersionsFixedIn} } @@ -1124,9 +1135,9 @@ func fetchUnsupportedPlPgSQLObjects(schemaAnalysisReport utils.SchemaReport) []U if source.DBType != POSTGRESQL { return nil } - analyzeIssues := schemaAnalysisReport.Issues - plpgsqlIssues := lo.Filter(analyzeIssues, func(issue utils.AnalyzeSchemaIssue, _ int) bool { - return issue.IssueType == UNSUPPORTED_PLPGSQL_OBEJCTS + + plpgsqlIssues := lo.Filter(schemaAnalysisReport.Issues, func(issue utils.AnalyzeSchemaIssue, _ int) bool { + return issue.IssueType == UNSUPPORTED_PLPGSQL_OBJECTS }) groupPlpgsqlIssuesByReason := lo.GroupBy(plpgsqlIssues, func(issue utils.AnalyzeSchemaIssue) string { return issue.Reason @@ -1139,11 +1150,6 @@ func fetchUnsupportedPlPgSQLObjects(schemaAnalysisReport utils.SchemaReport) []U var minVersionsFixedInSet bool for _, issue := range issues { - objects = append(objects, ObjectInfo{ - ObjectType: issue.ObjectType, - ObjectName: issue.ObjectName, - SqlStatement: issue.SqlStatement, - }) if !minVersionsFixedInSet { minVersionsFixedIn = issue.MinimumVersionsFixedIn minVersionsFixedInSet = true @@ -1151,6 +1157,12 @@ func fetchUnsupportedPlPgSQLObjects(schemaAnalysisReport utils.SchemaReport) []U if !areMinVersionsFixedInEqual(minVersionsFixedIn, issue.MinimumVersionsFixedIn) { utils.ErrExit("Issues belonging to UnsupportedFeature %s have different minimum versions fixed in: %v, %v", reason, minVersionsFixedIn, issue.MinimumVersionsFixedIn) } + + objects = append(objects, ObjectInfo{ + ObjectType: 
issue.ObjectType, + ObjectName: issue.ObjectName, + SqlStatement: issue.SqlStatement, + }) docsLink = issue.DocsLink } feature := UnsupportedFeature{ @@ -1412,7 +1424,7 @@ func addMigrationCaveatsToAssessmentReport(unsupportedDataTypesForLiveMigration migrationCaveats = append(migrationCaveats, getUnsupportedFeaturesFromSchemaAnalysisReport(FOREIGN_TABLE_CAVEAT_FEATURE, "", queryissue.FOREIGN_TABLE, schemaAnalysisReport, false, DESCRIPTION_FOREIGN_TABLES)) migrationCaveats = append(migrationCaveats, getUnsupportedFeaturesFromSchemaAnalysisReport(POLICIES_CAVEAT_FEATURE, "", queryissue.POLICY_WITH_ROLES, - schemaAnalysisReport, false, DESCRIPTION_POLICY_ROLE_ISSUE)) + schemaAnalysisReport, false, DESCRIPTION_POLICY_ROLE_DESCRIPTION)) if len(unsupportedDataTypesForLiveMigration) > 0 { columns := make([]ObjectInfo, 0) @@ -1420,7 +1432,7 @@ func addMigrationCaveatsToAssessmentReport(unsupportedDataTypesForLiveMigration columns = append(columns, ObjectInfo{ObjectName: fmt.Sprintf("%s.%s.%s (%s)", col.SchemaName, col.TableName, col.ColumnName, col.DataType)}) } if len(columns) > 0 { - migrationCaveats = append(migrationCaveats, UnsupportedFeature{UNSUPPORTED_DATATYPES_LIVE_CAVEAT_FEATURE, columns, false, UNSUPPORTED_DATATYPE_LIVE_MIGRATION_DOC_LINK, UNSUPPORTED_DATATYPES_FOR_LIVE_MIGRATION_ISSUE, nil}) + migrationCaveats = append(migrationCaveats, UnsupportedFeature{UNSUPPORTED_DATATYPES_LIVE_CAVEAT_FEATURE, columns, false, UNSUPPORTED_DATATYPE_LIVE_MIGRATION_DOC_LINK, UNSUPPORTED_DATATYPES_FOR_LIVE_MIGRATION_DESCRIPTION, nil}) } } if len(unsupportedDataTypesForLiveMigrationWithFForFB) > 0 { @@ -1429,7 +1441,7 @@ func addMigrationCaveatsToAssessmentReport(unsupportedDataTypesForLiveMigration columns = append(columns, ObjectInfo{ObjectName: fmt.Sprintf("%s.%s.%s (%s)", col.SchemaName, col.TableName, col.ColumnName, col.DataType)}) } if len(columns) > 0 { - migrationCaveats = append(migrationCaveats, UnsupportedFeature{UNSUPPORTED_DATATYPES_LIVE_WITH_FF_FB_CAVEAT_FEATURE, 
columns, false, UNSUPPORTED_DATATYPE_LIVE_MIGRATION_DOC_LINK, UNSUPPORTED_DATATYPES_FOR_LIVE_MIGRATION_WITH_FF_FB_ISSUE, nil}) + migrationCaveats = append(migrationCaveats, UnsupportedFeature{UNSUPPORTED_DATATYPES_LIVE_WITH_FF_FB_CAVEAT_FEATURE, columns, false, UNSUPPORTED_DATATYPE_LIVE_MIGRATION_DOC_LINK, UNSUPPORTED_DATATYPES_FOR_LIVE_MIGRATION_WITH_FF_FB_DESCRIPTION, nil}) } } migrationCaveats = lo.Filter(migrationCaveats, func(m UnsupportedFeature, _ int) bool { diff --git a/yb-voyager/cmd/assessMigrationCommandV2.go b/yb-voyager/cmd/assessMigrationCommandV2.go new file mode 100644 index 000000000..2a7e21e67 --- /dev/null +++ b/yb-voyager/cmd/assessMigrationCommandV2.go @@ -0,0 +1,424 @@ +package cmd + +import ( + "fmt" + "strings" + + "github.com/samber/lo" + log "github.com/sirupsen/logrus" + "github.com/yugabyte/yb-voyager/yb-voyager/src/constants" + "github.com/yugabyte/yb-voyager/yb-voyager/src/migassessment" + "github.com/yugabyte/yb-voyager/yb-voyager/src/query/queryissue" + "github.com/yugabyte/yb-voyager/yb-voyager/src/query/queryparser" + "github.com/yugabyte/yb-voyager/yb-voyager/src/utils" + "github.com/yugabyte/yb-voyager/yb-voyager/src/ybversion" + "golang.org/x/exp/slices" +) + +// TODO: content of this function will move to generateAssessmentReport() +// For now this function will fetch all the issues which will be used by the migration complexity determination logic +func fetchAllAssessmentIssues() ([]AssessmentIssue, error) { + var assessmentIssues []AssessmentIssue + + assessmentIssues = append(assessmentIssues, getAssessmentReportContentFromAnalyzeSchemaV2()...) + + issues, err := fetchUnsupportedObjectTypesV2() + if err != nil { + return assessmentIssues, fmt.Errorf("failed to fetch unsupported object type issues: %w", err) + } + assessmentIssues = append(assessmentIssues, issues...) 
+ + if utils.GetEnvAsBool("REPORT_UNSUPPORTED_QUERY_CONSTRUCTS", true) { + issues, err := fetchUnsupportedQueryConstructsV2() + if err != nil { + return assessmentIssues, fmt.Errorf("failed to fetch unsupported queries on YugabyteDB: %w", err) + } + assessmentIssues = append(assessmentIssues, issues...) + } + + unsupportedDataTypes, unsupportedDataTypesForLiveMigration, unsupportedDataTypesForLiveMigrationWithFForFB, err := fetchColumnsWithUnsupportedDataTypes() + if err != nil { + return assessmentIssues, fmt.Errorf("failed to fetch columns with unsupported data types: %w", err) + } + + assessmentIssues = append(assessmentIssues, getAssessmentIssuesForUnsupportedDatatypes(unsupportedDataTypes)...) + + assessmentIssues = append(assessmentIssues, fetchMigrationCaveatAssessmentIssues(unsupportedDataTypesForLiveMigration, unsupportedDataTypesForLiveMigrationWithFForFB)...) + + return assessmentIssues, nil +} + +// TODO: replacement of getAssessmentReportContentFromAnalyzeSchema() +func getAssessmentReportContentFromAnalyzeSchemaV2() []AssessmentIssue { + /* + Here we are generating analyze schema report which converts issue instance to analyze schema issue + Then in assessment codepath we extract the required information from analyze schema issue which could have been done directly from issue instance(TODO) + + But current Limitation is analyze schema currently uses regexp etc to detect some issues(not using parser). 
+ */ + schemaAnalysisReport := analyzeSchemaInternal(&source, true) + assessmentReport.MigrationComplexity = schemaAnalysisReport.MigrationComplexity + assessmentReport.SchemaSummary = schemaAnalysisReport.SchemaSummary + assessmentReport.SchemaSummary.Description = lo.Ternary(source.DBType == ORACLE, SCHEMA_SUMMARY_DESCRIPTION_ORACLE, SCHEMA_SUMMARY_DESCRIPTION) + + var assessmentIssues []AssessmentIssue + switch source.DBType { + case ORACLE: + assessmentIssues = append(assessmentIssues, fetchUnsupportedOracleFeaturesFromSchemaReportV2(schemaAnalysisReport)...) + case POSTGRESQL: + assessmentIssues = append(assessmentIssues, fetchUnsupportedPGFeaturesFromSchemaReportV2(schemaAnalysisReport)...) + default: + panic(fmt.Sprintf("unsupported source db type %q", source.DBType)) + } + + // Ques: Do we still need this and REPORT_UNSUPPORTED_QUERY_CONSTRUCTS env var + if utils.GetEnvAsBool("REPORT_UNSUPPORTED_PLPGSQL_OBJECTS", true) { + assessmentIssues = append(assessmentIssues, fetchUnsupportedPlPgSQLObjectsV2(schemaAnalysisReport)...) + } + + return assessmentIssues +} + +// TODO: will replace fetchUnsupportedOracleFeaturesFromSchemaReport() +func fetchUnsupportedOracleFeaturesFromSchemaReportV2(schemaAnalysisReport utils.SchemaReport) []AssessmentIssue { + log.Infof("fetching assessment issues of feature category for Oracle...") + var assessmentIssues []AssessmentIssue + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, "", COMPOUND_TRIGGER_ISSUE_REASON, schemaAnalysisReport, "")...) 
+ return assessmentIssues +} + +// TODO: will replace fetchUnsupportedPGFeaturesFromSchemaReport() +func fetchUnsupportedPGFeaturesFromSchemaReportV2(schemaAnalysisReport utils.SchemaReport) []AssessmentIssue { + log.Infof("fetching assessment issues of feature category for PG...") + + var assessmentIssues []AssessmentIssue + for _, indexMethod := range queryissue.UnsupportedIndexMethods { + displayIndexMethod := strings.ToUpper(indexMethod) + reason := fmt.Sprintf(INDEX_METHOD_ISSUE_REASON, displayIndexMethod) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, "", reason, schemaAnalysisReport, "")...) + } + + assessmentIssues = append(assessmentIssues, getIndexesOnComplexTypeUnsupportedFeatureV2(schemaAnalysisReport, queryissue.UnsupportedIndexDatatypes)...) + + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.CONSTRAINT_TRIGGER, "", schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.INHERITANCE, "", schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.STORED_GENERATED_COLUMNS, "", schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, "", CONVERSION_ISSUE_REASON, schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.MULTI_COLUMN_GIN_INDEX, "", schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.ALTER_TABLE_SET_COLUMN_ATTRIBUTE, "", schemaAnalysisReport, "")...) 
+ assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.ALTER_TABLE_DISABLE_RULE, "", schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.ALTER_TABLE_CLUSTER_ON, "", schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.STORAGE_PARAMETER, "", schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, "", UNSUPPORTED_EXTENSION_ISSUE, schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.EXCLUSION_CONSTRAINTS, "", schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.DEFERRABLE_CONSTRAINTS, "", schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, "", VIEW_CHECK_OPTION_ISSUE, schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.PK_UK_ON_COMPLEX_DATATYPE, "", schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.UNLOGGED_TABLE, "", schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.REFERENCING_CLAUSE_IN_TRIGGER, "", schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.BEFORE_ROW_TRIGGER_ON_PARTITIONED_TABLE, "", schemaAnalysisReport, "")...) 
+ assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.ADVISORY_LOCKS, "", schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.XML_FUNCTIONS, "", schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.SYSTEM_COLUMNS, "", schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.LARGE_OBJECT_FUNCTIONS, "", schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.REGEX_FUNCTIONS, "", schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.FETCH_WITH_TIES, "", schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.JSON_QUERY_FUNCTION, "", schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.JSON_CONSTRUCTOR_FUNCTION, "", schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.AGGREGATE_FUNCTION, "", schemaAnalysisReport, "")...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.SECURITY_INVOKER_VIEWS, "", schemaAnalysisReport, "")...) 
+ + return assessmentIssues +} + +func getIndexesOnComplexTypeUnsupportedFeatureV2(schemaAnalysisReport utils.SchemaReport, unsupportedIndexDatatypes []string) []AssessmentIssue { + var assessmentIssues []AssessmentIssue + // TODO: include MinimumVersionsFixedIn + + unsupportedIndexDatatypes = append(unsupportedIndexDatatypes, "array") // adding it here only as we know issue form analyze will come with type + unsupportedIndexDatatypes = append(unsupportedIndexDatatypes, "user_defined_type") // adding it here as we UDTs will come with this type. + for _, unsupportedType := range unsupportedIndexDatatypes { + // formattedObject.ObjectName = fmt.Sprintf("%s: %s", strings.ToUpper(unsupportedType), object.ObjectName) + issueReason := fmt.Sprintf(ISSUE_INDEX_WITH_COMPLEX_DATATYPES, unsupportedType) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, "", issueReason, schemaAnalysisReport, "")...) + } + return assessmentIssues +} + +// TODO: replacement of fetchUnsupportedObjectTypes() +func fetchUnsupportedObjectTypesV2() ([]AssessmentIssue, error) { + if source.DBType != ORACLE { + return nil, nil + } + + query := fmt.Sprintf(`SELECT schema_name, object_name, object_type FROM %s`, migassessment.OBJECT_TYPE_MAPPING) + rows, err := assessmentDB.Query(query) + if err != nil { + return nil, fmt.Errorf("error querying-%s: %w", query, err) + } + defer func() { + closeErr := rows.Close() + if closeErr != nil { + log.Warnf("error closing rows while fetching object type mapping metadata: %v", err) + } + }() + + var assessmentIssues []AssessmentIssue + for rows.Next() { + var schemaName, objectName, objectType string + err = rows.Scan(&schemaName, &objectName, &objectType) + if err != nil { + return nil, fmt.Errorf("error scanning rows:%w", err) + } + + switch { + case slices.Contains(OracleUnsupportedIndexTypes, objectType): + assessmentIssues = append(assessmentIssues, AssessmentIssue{ + Category: constants.FEATURE, + 
TypeName: UNSUPPORTED_INDEXES_FEATURE, + Impact: "", // TODO + ObjectType: "INDEX", + ObjectName: fmt.Sprintf("Index Name: %s, Index Type=%s", objectName, objectType), + }) + case objectType == VIRTUAL_COLUMN: + assessmentIssues = append(assessmentIssues, AssessmentIssue{ + Category: constants.FEATURE, + TypeName: VIRTUAL_COLUMNS_FEATURE, + ObjectName: objectName, + }) + case objectType == INHERITED_TYPE: + assessmentIssues = append(assessmentIssues, AssessmentIssue{ + Category: constants.FEATURE, + TypeName: INHERITED_TYPES_FEATURE, + ObjectName: objectName, + }) + case objectType == REFERENCE_PARTITION || objectType == SYSTEM_PARTITION: + referenceOrTablePartitionPresent = true + assessmentIssues = append(assessmentIssues, AssessmentIssue{ + Category: constants.FEATURE, + TypeName: UNSUPPORTED_PARTITIONING_METHODS_FEATURE, + ObjectType: "TABLE", + ObjectName: fmt.Sprintf("Table Name: %s, Partition Method: %s", objectName, objectType), + }) + } + } + + return assessmentIssues, nil +} + +// // TODO: replacement of fetchUnsupportedPlPgSQLObjects() +func fetchUnsupportedPlPgSQLObjectsV2(schemaAnalysisReport utils.SchemaReport) []AssessmentIssue { + if source.DBType != POSTGRESQL { + return nil + } + + plpgsqlIssues := lo.Filter(schemaAnalysisReport.Issues, func(issue utils.AnalyzeSchemaIssue, _ int) bool { + return issue.IssueType == UNSUPPORTED_PLPGSQL_OBJECTS + }) + groupedPlpgsqlIssuesByReason := lo.GroupBy(plpgsqlIssues, func(issue utils.AnalyzeSchemaIssue) string { + return issue.Reason + }) + + var assessmentIssues []AssessmentIssue + for reason, issues := range groupedPlpgsqlIssuesByReason { + var minVersionsFixedIn map[string]*ybversion.YBVersion + var minVersionsFixedInSet bool + + for _, issue := range issues { + if !minVersionsFixedInSet { + minVersionsFixedIn = issue.MinimumVersionsFixedIn + minVersionsFixedInSet = true + } + if !areMinVersionsFixedInEqual(minVersionsFixedIn, issue.MinimumVersionsFixedIn) { + utils.ErrExit("Issues belonging to 
UnsupportedFeature %s have different minimum versions fixed in: %v, %v", reason, minVersionsFixedIn, issue.MinimumVersionsFixedIn) + } + + assessmentIssues = append(assessmentIssues, AssessmentIssue{ + Category: constants.PLPGSQL_OBJECT, + TypeName: reason, + Impact: issue.Impact, // TODO + ObjectType: issue.ObjectType, + ObjectName: issue.ObjectName, + SqlStatement: issue.SqlStatement, + DocsLink: issue.DocsLink, + MinimumVersionFixedIn: issue.MinimumVersionsFixedIn, + }) + } + } + + return assessmentIssues +} + +// Q: do we no need of displayDDLInHTML in this approach? DDL can always be there for issues in the table. +func getAssessmentIssuesFromSchemaAnalysisReport(category string, issueType string, issueReason string, schemaAnalysisReport utils.SchemaReport, issueDescription string) []AssessmentIssue { + log.Infof("filtering issues for type: %s", issueType) + var issues []AssessmentIssue + var minVersionsFixedIn map[string]*ybversion.YBVersion + var minVersionsFixedInSet bool + for _, analyzeIssue := range schemaAnalysisReport.Issues { + if !slices.Contains([]string{UNSUPPORTED_FEATURES, MIGRATION_CAVEATS}, analyzeIssue.IssueType) { + continue + } + + issueMatched := lo.Ternary[bool](issueType != "", issueType == analyzeIssue.Type, strings.Contains(analyzeIssue.Reason, issueReason)) + if issueMatched { + if !minVersionsFixedInSet { + minVersionsFixedIn = analyzeIssue.MinimumVersionsFixedIn + minVersionsFixedInSet = true + } + if !areMinVersionsFixedInEqual(minVersionsFixedIn, analyzeIssue.MinimumVersionsFixedIn) { + utils.ErrExit("Issues belonging to type %s have different minimum versions fixed in: %v, %v", analyzeIssue.Type, minVersionsFixedIn, analyzeIssue.MinimumVersionsFixedIn) + } + + issues = append(issues, AssessmentIssue{ + Category: category, + CategoryDescription: GetCategoryDescription(category), + TypeName: analyzeIssue.Type, + TypeDescription: issueDescription, // TODO: verify + Impact: analyzeIssue.Impact, + ObjectType: analyzeIssue.ObjectType, + 
ObjectName: analyzeIssue.ObjectName, + SqlStatement: analyzeIssue.SqlStatement, + DocsLink: analyzeIssue.DocsLink, + MinimumVersionFixedIn: minVersionsFixedIn, + }) + } + } + + return issues +} + +// TODO: soon to replace fetchUnsupportedQueryConstructs() +func fetchUnsupportedQueryConstructsV2() ([]AssessmentIssue, error) { + if source.DBType != POSTGRESQL { + return nil, nil + } + + query := fmt.Sprintf("SELECT DISTINCT query from %s", migassessment.DB_QUERIES_SUMMARY) + rows, err := assessmentDB.Query(query) + if err != nil { + return nil, fmt.Errorf("error querying=%s on assessmentDB: %w", query, err) + } + defer func() { + closeErr := rows.Close() + if closeErr != nil { + log.Warnf("error closing rows while fetching database queries summary metadata: %v", err) + } + }() + + var executedQueries []string + for rows.Next() { + var executedQuery string + err := rows.Scan(&executedQuery) + if err != nil { + return nil, fmt.Errorf("error scanning rows: %w", err) + } + executedQueries = append(executedQueries, executedQuery) + } + + if len(executedQueries) == 0 { + log.Infof("queries info not present in the assessment metadata for detecting unsupported query constructs") + return nil, nil + } + + var assessmentIssues []AssessmentIssue + for i := 0; i < len(executedQueries); i++ { + query := executedQueries[i] + log.Debugf("fetching unsupported query constructs for query - [%s]", query) + collectedSchemaList, err := queryparser.GetSchemaUsed(query) + if err != nil { // no need to error out if failed to get schemas for a query + log.Errorf("failed to get schemas used for query [%s]: %v", query, err) + continue + } + + log.Infof("collected schema list %v(len=%d) for query [%s]", collectedSchemaList, len(collectedSchemaList), query) + if !considerQueryForIssueDetection(collectedSchemaList) { + log.Infof("ignoring query due to difference in collected schema list %v(len=%d) vs source schema list %v(len=%d)", + collectedSchemaList, len(collectedSchemaList), 
source.GetSchemaList(), len(source.GetSchemaList())) + continue + } + + issues, err := parserIssueDetector.GetDMLIssues(query, targetDbVersion) + if err != nil { + log.Errorf("failed while trying to fetch query issues in query - [%s]: %v", + query, err) + } + + for _, issue := range issues { + issue := AssessmentIssue{ + Category: constants.QUERY_CONSTRUCT, + TypeName: issue.TypeName, + SqlStatement: issue.SqlStatement, + DocsLink: issue.DocsLink, + MinimumVersionFixedIn: issue.MinimumVersionsFixedIn, + } + assessmentIssues = append(assessmentIssues, issue) + } + } + + return assessmentIssues, nil +} + +func getAssessmentIssuesForUnsupportedDatatypes(unsupportedDatatypes []utils.TableColumnsDataTypes) []AssessmentIssue { + var assessmentIssues []AssessmentIssue + for _, colInfo := range unsupportedDatatypes { + qualifiedColName := fmt.Sprintf("%s.%s.%s", colInfo.SchemaName, colInfo.TableName, colInfo.ColumnName) + issue := AssessmentIssue{ + Category: constants.DATATYPE, + CategoryDescription: GetCategoryDescription(constants.DATATYPE), + TypeName: colInfo.DataType, // TODO: maybe name it like "unsupported datatype - geometry" + Impact: "", // TODO + ObjectType: constants.COLUMN, + ObjectName: qualifiedColName, + DocsLink: "", // TODO + MinimumVersionFixedIn: nil, // TODO + } + assessmentIssues = append(assessmentIssues, issue) + } + + return assessmentIssues +} + +// TODO: soon to replace addMigrationCaveatsToAssessmentReport() +func fetchMigrationCaveatAssessmentIssues(unsupportedDataTypesForLiveMigration []utils.TableColumnsDataTypes, unsupportedDataTypesForLiveMigrationWithFForFB []utils.TableColumnsDataTypes) []AssessmentIssue { + var assessmentIssues []AssessmentIssue + switch source.DBType { + case POSTGRESQL: + log.Infof("fetching migration caveat category assessment issues") + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.MIGRATION_CAVEATS, queryissue.ALTER_TABLE_ADD_PK_ON_PARTITIONED_TABLE, "", 
schemaAnalysisReport, DESCRIPTION_ADD_PK_TO_PARTITION_TABLE)...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.MIGRATION_CAVEATS, queryissue.FOREIGN_TABLE, "", schemaAnalysisReport, DESCRIPTION_FOREIGN_TABLES)...) + assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.MIGRATION_CAVEATS, queryissue.POLICY_WITH_ROLES, "", schemaAnalysisReport, DESCRIPTION_POLICY_ROLE_DESCRIPTION)...) + + if len(unsupportedDataTypesForLiveMigration) > 0 { + for _, colInfo := range unsupportedDataTypesForLiveMigration { + issue := AssessmentIssue{ + Category: constants.MIGRATION_CAVEATS, + CategoryDescription: "", // TODO + TypeName: UNSUPPORTED_DATATYPES_LIVE_CAVEAT_FEATURE, // TODO add object type in type name + TypeDescription: UNSUPPORTED_DATATYPES_FOR_LIVE_MIGRATION_DESCRIPTION, + ObjectType: constants.COLUMN, + ObjectName: fmt.Sprintf("%s.%s.%s", colInfo.SchemaName, colInfo.TableName, colInfo.ColumnName), + DocsLink: UNSUPPORTED_DATATYPE_LIVE_MIGRATION_DOC_LINK, + } + assessmentIssues = append(assessmentIssues, issue) + } + } + + if len(unsupportedDataTypesForLiveMigrationWithFForFB) > 0 { + for _, colInfo := range unsupportedDataTypesForLiveMigrationWithFForFB { + issue := AssessmentIssue{ + Category: constants.MIGRATION_CAVEATS, + CategoryDescription: "", // TODO + TypeName: UNSUPPORTED_DATATYPES_LIVE_WITH_FF_FB_CAVEAT_FEATURE, // TODO add object type in type name + TypeDescription: UNSUPPORTED_DATATYPES_FOR_LIVE_MIGRATION_WITH_FF_FB_DESCRIPTION, + ObjectType: constants.COLUMN, + ObjectName: fmt.Sprintf("%s.%s.%s", colInfo.SchemaName, colInfo.TableName, colInfo.ColumnName), + DocsLink: UNSUPPORTED_DATATYPE_LIVE_MIGRATION_DOC_LINK, + } + assessmentIssues = append(assessmentIssues, issue) + } + } + } + + return assessmentIssues +} diff --git a/yb-voyager/cmd/constants.go b/yb-voyager/cmd/constants.go index f4d9cc4fc..e1f0ff1a6 100644 --- a/yb-voyager/cmd/constants.go +++ 
b/yb-voyager/cmd/constants.go @@ -15,6 +15,11 @@ limitations under the License. */ package cmd +import ( + "github.com/yugabyte/yb-voyager/yb-voyager/src/constants" + "github.com/yugabyte/yb-voyager/yb-voyager/src/utils" +) + const ( KB = 1024 MB = 1024 * 1024 @@ -104,7 +109,8 @@ const ( UNSUPPORTED_FEATURES = "unsupported_features" UNSUPPORTED_DATATYPES = "unsupported_datatypes" - UNSUPPORTED_PLPGSQL_OBEJCTS = "unsupported_plpgsql_objects" + UNSUPPORTED_PLPGSQL_OBJECTS = "unsupported_plpgsql_objects" + MIGRATION_CAVEATS = "migration_caveats" REPORT_UNSUPPORTED_QUERY_CONSTRUCTS = "REPORT_UNSUPPORTED_QUERY_CONSTRUCTS" HTML = "html" @@ -173,21 +179,14 @@ const ( List of all the features we are reporting as part of Unsupported features and Migration caveats */ const ( - // AssessmentIssue types used in YugabyteD payload - FEATURE = "feature" - DATATYPE = "datatype" - QUERY_CONSTRUCT = "query_construct" // confused: in json for some values we are using space separated and for some snake_case - MIGRATION_CAVEATS = "migration_caveats" - PLPGSQL_OBJECT = "plpgsql_object" - // Description - FEATURE_ISSUE_TYPE_DESCRIPTION = "Features of the source database that are not supported on the target YugabyteDB." - DATATYPE_ISSUE_TYPE_DESCRIPTION = "Data types of the source database that are not supported on the target YugabyteDB." - MIGRATION_CAVEATS_TYPE_DESCRIPTION = "Migration Caveats highlights the current limitations with the migration workflow." - UNSUPPORTED_QUERY_CONSTRUTS_DESCRIPTION = "Source database queries not supported in YugabyteDB, identified by scanning system tables." - UNSUPPPORTED_PLPGSQL_OBJECT_DESCRIPTION = "Source schema objects having unsupported statements on the target YugabyteDB in PL/pgSQL code block" - SCHEMA_SUMMARY_DESCRIPTION = "Objects that will be created on the target YugabyteDB." - SCHEMA_SUMMARY_DESCRIPTION_ORACLE = SCHEMA_SUMMARY_DESCRIPTION + " Some of the index and sequence names might be different from those in the source database." 
+ FEATURE_CATEGORY_DESCRIPTION = "Features of the source database that are not supported on the target YugabyteDB." + DATATYPE_CATEGORY_DESCRIPTION = "Data types of the source database that are not supported on the target YugabyteDB." + MIGRATION_CAVEATS_CATEGORY_DESCRIPTION = "Migration Caveats highlights the current limitations with the migration workflow." + UNSUPPORTED_QUERY_CONSTRUCTS_CATEGORY_DESCRIPTION = "Source database queries not supported in YugabyteDB, identified by scanning system tables." + UNSUPPPORTED_PLPGSQL_OBJECT_CATEGORY_DESCRIPTION = "Source schema objects having unsupported statements on the target YugabyteDB in PL/pgSQL code block" + SCHEMA_SUMMARY_DESCRIPTION = "Objects that will be created on the target YugabyteDB." + SCHEMA_SUMMARY_DESCRIPTION_ORACLE = SCHEMA_SUMMARY_DESCRIPTION + " Some of the index and sequence names might be different from those in the source database." //Unsupported Features @@ -222,16 +221,16 @@ const ( // Migration caveats //POSTGRESQL - ALTER_PARTITION_ADD_PK_CAVEAT_FEATURE = "Alter partitioned tables to add Primary Key" - FOREIGN_TABLE_CAVEAT_FEATURE = "Foreign tables" - POLICIES_CAVEAT_FEATURE = "Policies" - UNSUPPORTED_DATATYPES_LIVE_CAVEAT_FEATURE = "Unsupported Data Types for Live Migration" - UNSUPPORTED_DATATYPES_LIVE_WITH_FF_FB_CAVEAT_FEATURE = "Unsupported Data Types for Live Migration with Fall-forward/Fallback" - UNSUPPORTED_DATATYPES_FOR_LIVE_MIGRATION_ISSUE = "There are some data types in the schema that are not supported by live migration of data. These columns will be excluded when exporting and importing data in live migration workflows." - UNSUPPORTED_DATATYPES_FOR_LIVE_MIGRATION_WITH_FF_FB_ISSUE = "There are some data types in the schema that are not supported by live migration with fall-forward/fall-back. These columns will be excluded when exporting and importing data in live migration workflows." 
- DESCRIPTION_ADD_PK_TO_PARTITION_TABLE = `After export schema, the ALTER table should be merged with CREATE table for partitioned tables as alter of partitioned tables to add primary key is not supported.` - DESCRIPTION_FOREIGN_TABLES = `During the export schema phase, SERVER and USER MAPPING objects are not exported. These should be manually created to make the foreign tables work.` - DESCRIPTION_POLICY_ROLE_ISSUE = `There are some policies that are created for certain users/roles. During the export schema phase, USERs and GRANTs are not exported. Therefore, they will have to be manually created before running import schema.` + ALTER_PARTITION_ADD_PK_CAVEAT_FEATURE = "Alter partitioned tables to add Primary Key" + FOREIGN_TABLE_CAVEAT_FEATURE = "Foreign tables" + POLICIES_CAVEAT_FEATURE = "Policies" + UNSUPPORTED_DATATYPES_LIVE_CAVEAT_FEATURE = "Unsupported Data Types for Live Migration" + UNSUPPORTED_DATATYPES_LIVE_WITH_FF_FB_CAVEAT_FEATURE = "Unsupported Data Types for Live Migration with Fall-forward/Fallback" + UNSUPPORTED_DATATYPES_FOR_LIVE_MIGRATION_DESCRIPTION = "There are some data types in the schema that are not supported by live migration of data. These columns will be excluded when exporting and importing data in live migration workflows." + UNSUPPORTED_DATATYPES_FOR_LIVE_MIGRATION_WITH_FF_FB_DESCRIPTION = "There are some data types in the schema that are not supported by live migration with fall-forward/fall-back. These columns will be excluded when exporting and importing data in live migration workflows." + DESCRIPTION_ADD_PK_TO_PARTITION_TABLE = `After export schema, the ALTER table should be merged with CREATE table for partitioned tables as alter of partitioned tables to add primary key is not supported.` + DESCRIPTION_FOREIGN_TABLES = `During the export schema phase, SERVER and USER MAPPING objects are not exported. 
These should be manually created to make the foreign tables work.` + DESCRIPTION_POLICY_ROLE_DESCRIPTION = `There are some policies that are created for certain users/roles. During the export schema phase, USERs and GRANTs are not exported. Therefore, they will have to be manually created before running import schema.` ) var supportedSourceDBTypes = []string{ORACLE, MYSQL, POSTGRESQL, YUGABYTEDB} @@ -244,3 +243,22 @@ var validSSLModes = map[string][]string{ } var EVENT_BATCH_MAX_RETRY_COUNT = 50 + +// returns the description for a given assessment issue category +func GetCategoryDescription(category string) string { + switch category { + case constants.FEATURE: + return FEATURE_CATEGORY_DESCRIPTION + case constants.DATATYPE: + return DATATYPE_CATEGORY_DESCRIPTION + case constants.QUERY_CONSTRUCT: + return UNSUPPORTED_QUERY_CONSTRUCTS_CATEGORY_DESCRIPTION + case constants.PLPGSQL_OBJECT: + return UNSUPPPORTED_PLPGSQL_OBJECT_CATEGORY_DESCRIPTION + case constants.MIGRATION_CAVEATS: + return MIGRATION_CAVEATS_CATEGORY_DESCRIPTION + default: + utils.ErrExit("unsupported assessment issue category %q", category) + } + return "" +} diff --git a/yb-voyager/src/constants/constants.go b/yb-voyager/src/constants/constants.go index 7912fb1d5..59710a9e4 100644 --- a/yb-voyager/src/constants/constants.go +++ b/yb-voyager/src/constants/constants.go @@ -19,10 +19,19 @@ const ( // Database Object types TABLE = "table" FUNCTION = "function" + COLUMN = "column" // Source DB Types YUGABYTEDB = "yugabytedb" POSTGRESQL = "postgresql" ORACLE = "oracle" MYSQL = "mysql" + + // AssessmentIssue Categories - used by YugabyteD payload and Migration Complexity Explainability + // TODO: soon to be renamed as SCHEMA, SCHEMA_PLPGSQL, DML_QUERY, MIGRATION_CAVEAT, "DATATYPE" + FEATURE = "feature" + DATATYPE = "datatype" + QUERY_CONSTRUCT = "query_construct" + MIGRATION_CAVEATS = "migration_caveats" + PLPGSQL_OBJECT = "plpgsql_object" ) diff --git a/yb-voyager/src/issue/issue.go 
b/yb-voyager/src/issue/issue.go index c30d4f837..fc4978a97 100644 --- a/yb-voyager/src/issue/issue.go +++ b/yb-voyager/src/issue/issue.go @@ -24,6 +24,7 @@ type Issue struct { Type string // (advisory_locks, index_not_supported, etc) TypeName string // for display TypeDescription string + Impact string Suggestion string GH string DocsLink string diff --git a/yb-voyager/src/utils/commonVariables.go b/yb-voyager/src/utils/commonVariables.go index b42c547d9..4463f65b6 100644 --- a/yb-voyager/src/utils/commonVariables.go +++ b/yb-voyager/src/utils/commonVariables.go @@ -103,10 +103,11 @@ type DBObject struct { type AnalyzeSchemaIssue struct { // TODO: rename IssueType to Category IssueType string `json:"IssueType"` //category: unsupported_features, unsupported_plpgsql_objects, etc - ObjectType string `json:"ObjectType"` - ObjectName string `json:"ObjectName"` Reason string `json:"Reason"` Type string `json:"-" xml:"-"` // identifier for issue type ADVISORY_LOCKS, SYSTEM_COLUMNS, etc + Impact string `json:"-" xml:"-"` + ObjectType string `json:"ObjectType"` + ObjectName string `json:"ObjectName"` SqlStatement string `json:"SqlStatement,omitempty"` FilePath string `json:"FilePath"` Suggestion string `json:"Suggestion"`