diff --git a/yb-voyager/cmd/assessMigrationCommand.go b/yb-voyager/cmd/assessMigrationCommand.go index daa6210aa..1f31e940e 100644 --- a/yb-voyager/cmd/assessMigrationCommand.go +++ b/yb-voyager/cmd/assessMigrationCommand.go @@ -31,7 +31,6 @@ import ( "syscall" "text/template" - "github.com/davecgh/go-spew/spew" "github.com/fatih/color" "github.com/samber/lo" log "github.com/sirupsen/logrus" @@ -425,20 +424,16 @@ func assessMigration() (err error) { return fmt.Errorf("failed to generate assessment report: %w", err) } - assessmentIssues, err := fetchAllAssessmentIssues() - if err != nil { - log.Errorf("failed to fetch the assessment issues for migration complexity: %v", err) - } - log.Infof("number of assessment issues detected: %d\n", len(assessmentIssues)) - var data string - for i, issue := range assessmentIssues { - data += fmt.Sprintf("AssessmentIssue[%d] = %s\n\n", i, spew.Sdump(issue)) - } - assessIssuesFile := filepath.Join(exportDir, "assessment_issues.txt") - err = os.WriteFile(assessIssuesFile, []byte(data), 0677) - if err != nil { - panic("failed to write to assessment_issues.txt") - } + log.Infof("number of assessment issues detected: %d\n", len(assessmentReport.Issues)) + // var data string + // for i, issue := range assessmentReport.Issues { + // data += fmt.Sprintf("AssessmentIssue[%d] = %s\n\n", i, spew.Sdump(issue)) + // } + // assessIssuesFile := filepath.Join(exportDir, "assessment_issues.txt") + // err = os.WriteFile(assessIssuesFile, []byte(data), 0677) + // if err != nil { + // panic("failed to write to assessment_issues.txt") + // } utils.PrintAndLog("Migration assessment completed successfully.") completedEvent := createMigrationAssessmentCompletedEvent() @@ -907,6 +902,8 @@ func generateAssessmentReport() (err error) { assessmentReport.UnsupportedDataTypes = unsupportedDataTypes assessmentReport.UnsupportedDataTypesDesc = DATATYPE_CATEGORY_DESCRIPTION + assessmentReport.AppendIssues(getAssessmentIssuesForUnsupportedDatatypes(unsupportedDataTypes)...) + addMigrationCaveatsToAssessmentReport(unsupportedDataTypesForLiveMigration, unsupportedDataTypesForLiveMigrationWithFForFB) assessmentReport.Sizing = migassessment.SizingReport @@ -988,7 +985,7 @@ func areMinVersionsFixedInEqual(m1 map[string]*ybversion.YBVersion, m2 map[strin return true } -func getUnsupportedFeaturesFromSchemaAnalysisReport(featureName string, issueReason string, issueType string, schemaAnalysisReport utils.SchemaReport, displayDDLInHTML bool, description string) UnsupportedFeature { +func getUnsupportedFeaturesFromSchemaAnalysisReport(category string, featureName string, issueReason string, issueType string, schemaAnalysisReport utils.SchemaReport, displayDDLInHTML bool, description string) UnsupportedFeature { log.Info("filtering issues for feature: ", featureName) objects := make([]ObjectInfo, 0) link := "" // for oracle we shouldn't display any line for links @@ -1016,12 +1013,31 @@ func getUnsupportedFeaturesFromSchemaAnalysisReport(featureName string, issueRea } link = analyzeIssue.DocsLink objects = append(objects, objectInfo) + + assessmentReport.AppendIssues(convertAnalyzeSchemaIssueToAssessmentIssue(analyzeIssue, category, description, minVersionsFixedIn)) } } return UnsupportedFeature{featureName, objects, displayDDLInHTML, link, description, minVersionsFixedIn} } +// Q: do we still need displayDDLInHTML in this approach? DDL can always be shown for issues in the table if present.
+func convertAnalyzeSchemaIssueToAssessmentIssue(analyzeSchemaIssue utils.AnalyzeSchemaIssue, category string, issueDescription string, minVersionsFixedIn map[string]*ybversion.YBVersion) AssessmentIssue { + return AssessmentIssue{ + Category: category, + CategoryDescription: GetCategoryDescription(category), + Type: analyzeSchemaIssue.Type, + Name: analyzeSchemaIssue.Reason, // in convertIssueInstanceToAnalyzeIssue() we assign IssueType to Reason field + Description: issueDescription, // TODO: verify + Impact: analyzeSchemaIssue.Impact, + ObjectType: analyzeSchemaIssue.ObjectType, + ObjectName: analyzeSchemaIssue.ObjectName, + SqlStatement: analyzeSchemaIssue.SqlStatement, + DocsLink: analyzeSchemaIssue.DocsLink, + MinimumVersionFixedIn: minVersionsFixedIn, + } +} + func fetchUnsupportedPGFeaturesFromSchemaReport(schemaAnalysisReport utils.SchemaReport) ([]UnsupportedFeature, error) { log.Infof("fetching unsupported features for PG...") unsupportedFeatures := make([]UnsupportedFeature, 0) @@ -1030,38 +1046,38 @@ func fetchUnsupportedPGFeaturesFromSchemaReport(schemaAnalysisReport utils.Schem displayIndexMethod := strings.ToUpper(indexMethod) featureName := fmt.Sprintf("%s indexes", displayIndexMethod) reason := fmt.Sprintf(INDEX_METHOD_ISSUE_REASON, displayIndexMethod) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(featureName, reason, "", schemaAnalysisReport, false, "")) - } - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(CONSTRAINT_TRIGGERS_FEATURE, "", queryissue.CONSTRAINT_TRIGGER, schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(INHERITED_TABLES_FEATURE, "", queryissue.INHERITANCE, schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(GENERATED_COLUMNS_FEATURE, "", queryissue.STORED_GENERATED_COLUMNS, schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(CONVERSIONS_OBJECTS_FEATURE, CONVERSION_ISSUE_REASON, "", schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(MULTI_COLUMN_GIN_INDEX_FEATURE, "", queryissue.MULTI_COLUMN_GIN_INDEX, schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(ALTER_SETTING_ATTRIBUTE_FEATURE, "", queryissue.ALTER_TABLE_SET_COLUMN_ATTRIBUTE, schemaAnalysisReport, true, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(DISABLING_TABLE_RULE_FEATURE, "", queryissue.ALTER_TABLE_DISABLE_RULE, schemaAnalysisReport, true, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(CLUSTER_ON_FEATURE, "", queryissue.ALTER_TABLE_CLUSTER_ON, schemaAnalysisReport, true, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(STORAGE_PARAMETERS_FEATURE, "", queryissue.STORAGE_PARAMETER, schemaAnalysisReport, true, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(EXTENSION_FEATURE, UNSUPPORTED_EXTENSION_ISSUE, "", schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(EXCLUSION_CONSTRAINT_FEATURE, "", 
queryissue.EXCLUSION_CONSTRAINTS, schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(DEFERRABLE_CONSTRAINT_FEATURE, "", queryissue.DEFERRABLE_CONSTRAINTS, schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(VIEW_CHECK_FEATURE, VIEW_CHECK_OPTION_ISSUE, "", schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, featureName, reason, "", schemaAnalysisReport, false, "")) + } + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, CONSTRAINT_TRIGGERS_FEATURE, "", queryissue.CONSTRAINT_TRIGGER, schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, INHERITED_TABLES_FEATURE, "", queryissue.INHERITANCE, schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, GENERATED_COLUMNS_FEATURE, "", queryissue.STORED_GENERATED_COLUMNS, schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, CONVERSIONS_OBJECTS_FEATURE, CONVERSION_ISSUE_REASON, "", schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, MULTI_COLUMN_GIN_INDEX_FEATURE, "", queryissue.MULTI_COLUMN_GIN_INDEX, schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, ALTER_SETTING_ATTRIBUTE_FEATURE, "", queryissue.ALTER_TABLE_SET_COLUMN_ATTRIBUTE, schemaAnalysisReport, true, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, DISABLING_TABLE_RULE_FEATURE, "", queryissue.ALTER_TABLE_DISABLE_RULE, schemaAnalysisReport, true, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, CLUSTER_ON_FEATURE, "", queryissue.ALTER_TABLE_CLUSTER_ON, schemaAnalysisReport, true, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, STORAGE_PARAMETERS_FEATURE, "", queryissue.STORAGE_PARAMETER, schemaAnalysisReport, true, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, EXTENSION_FEATURE, UNSUPPORTED_EXTENSION_ISSUE, "", schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, EXCLUSION_CONSTRAINT_FEATURE, "", queryissue.EXCLUSION_CONSTRAINTS, schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, DEFERRABLE_CONSTRAINT_FEATURE, "", queryissue.DEFERRABLE_CONSTRAINTS, schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, VIEW_CHECK_FEATURE, VIEW_CHECK_OPTION_ISSUE, "", schemaAnalysisReport, false, "")) unsupportedFeatures = append(unsupportedFeatures, getIndexesOnComplexTypeUnsupportedFeature(schemaAnalysisReport, 
queryissue.UnsupportedIndexDatatypes)) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(PK_UK_CONSTRAINT_ON_COMPLEX_DATATYPES_FEATURE, "", queryissue.PK_UK_ON_COMPLEX_DATATYPE, schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(UNLOGGED_TABLE_FEATURE, "", queryissue.UNLOGGED_TABLE, schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(REFERENCING_TRIGGER_FEATURE, "", queryissue.REFERENCING_CLAUSE_IN_TRIGGER, schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(BEFORE_FOR_EACH_ROW_TRIGGERS_ON_PARTITIONED_TABLE_FEATURE, "", queryissue.BEFORE_ROW_TRIGGER_ON_PARTITIONED_TABLE, schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(queryissue.ADVISORY_LOCKS_NAME, "", queryissue.ADVISORY_LOCKS, schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(queryissue.XML_FUNCTIONS_NAME, "", queryissue.XML_FUNCTIONS, schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(queryissue.SYSTEM_COLUMNS_NAME, "", queryissue.SYSTEM_COLUMNS, schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(queryissue.LARGE_OBJECT_FUNCTIONS_NAME, "", queryissue.LARGE_OBJECT_FUNCTIONS, schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(REGEX_FUNCTIONS_FEATURE, "", queryissue.REGEX_FUNCTIONS, schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(FETCH_WITH_TIES_FEATURE, "", queryissue.FETCH_WITH_TIES, schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(queryissue.JSON_QUERY_FUNCTIONS_NAME, "", queryissue.JSON_QUERY_FUNCTION, schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(queryissue.JSON_CONSTRUCTOR_FUNCTION_NAME, "", queryissue.JSON_CONSTRUCTOR_FUNCTION, schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(queryissue.AGGREGATION_FUNCTIONS_NAME, "", queryissue.AGGREGATE_FUNCTION, schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(queryissue.SECURITY_INVOKER_VIEWS_NAME, "", queryissue.SECURITY_INVOKER_VIEWS, schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(queryissue.FOREIGN_KEY_REFERENCES_PARTITIONED_TABLE_NAME, "", queryissue.FOREIGN_KEY_REFERENCES_PARTITIONED_TABLE, schemaAnalysisReport, false, "")) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(queryissue.JSON_TYPE_PREDICATE_NAME, "", queryissue.JSON_TYPE_PREDICATE, schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, PK_UK_CONSTRAINT_ON_COMPLEX_DATATYPES_FEATURE, "", 
queryissue.PK_UK_ON_COMPLEX_DATATYPE, schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, UNLOGGED_TABLE_FEATURE, "", queryissue.UNLOGGED_TABLE, schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, REFERENCING_TRIGGER_FEATURE, "", queryissue.REFERENCING_CLAUSE_IN_TRIGGER, schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, BEFORE_FOR_EACH_ROW_TRIGGERS_ON_PARTITIONED_TABLE_FEATURE, "", queryissue.BEFORE_ROW_TRIGGER_ON_PARTITIONED_TABLE, schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, queryissue.ADVISORY_LOCKS_NAME, "", queryissue.ADVISORY_LOCKS, schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, queryissue.XML_FUNCTIONS_NAME, "", queryissue.XML_FUNCTIONS, schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, queryissue.SYSTEM_COLUMNS_NAME, "", queryissue.SYSTEM_COLUMNS, schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, queryissue.LARGE_OBJECT_FUNCTIONS_NAME, "", queryissue.LARGE_OBJECT_FUNCTIONS, schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, REGEX_FUNCTIONS_FEATURE, "", queryissue.REGEX_FUNCTIONS, schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, FETCH_WITH_TIES_FEATURE, "", queryissue.FETCH_WITH_TIES, schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, queryissue.JSON_QUERY_FUNCTIONS_NAME, "", queryissue.JSON_QUERY_FUNCTION, schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, queryissue.JSON_CONSTRUCTOR_FUNCTION_NAME, "", queryissue.JSON_CONSTRUCTOR_FUNCTION, schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, queryissue.AGGREGATION_FUNCTIONS_NAME, "", queryissue.AGGREGATE_FUNCTION, schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, queryissue.SECURITY_INVOKER_VIEWS_NAME, "", queryissue.SECURITY_INVOKER_VIEWS, schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, queryissue.FOREIGN_KEY_REFERENCES_PARTITIONED_TABLE_NAME, "", queryissue.FOREIGN_KEY_REFERENCES_PARTITIONED_TABLE, schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, queryissue.JSON_TYPE_PREDICATE_NAME, "", queryissue.JSON_TYPE_PREDICATE, schemaAnalysisReport, false, "")) return lo.Filter(unsupportedFeatures, func(f UnsupportedFeature, _ int) bool 
{ return len(f.Objects) > 0 @@ -1078,7 +1094,7 @@ func getIndexesOnComplexTypeUnsupportedFeature(schemaAnalysisReport utils.Schema unsupportedIndexDatatypes = append(unsupportedIndexDatatypes, "array") // adding it here only as we know issue form analyze will come with type unsupportedIndexDatatypes = append(unsupportedIndexDatatypes, "user_defined_type") // adding it here as we UDTs will come with this type. for _, unsupportedType := range unsupportedIndexDatatypes { - indexes := getUnsupportedFeaturesFromSchemaAnalysisReport(fmt.Sprintf("%s indexes", unsupportedType), fmt.Sprintf(ISSUE_INDEX_WITH_COMPLEX_DATATYPES, unsupportedType), "", schemaAnalysisReport, false, "") + indexes := getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, fmt.Sprintf("%s indexes", unsupportedType), fmt.Sprintf(ISSUE_INDEX_WITH_COMPLEX_DATATYPES, unsupportedType), "", schemaAnalysisReport, false, "") for _, object := range indexes.Objects { formattedObject := object formattedObject.ObjectName = fmt.Sprintf("%s: %s", strings.ToUpper(unsupportedType), object.ObjectName) @@ -1094,7 +1110,7 @@ func getIndexesOnComplexTypeUnsupportedFeature(schemaAnalysisReport utils.Schema func fetchUnsupportedOracleFeaturesFromSchemaReport(schemaAnalysisReport utils.SchemaReport) ([]UnsupportedFeature, error) { log.Infof("fetching unsupported features for Oracle...") unsupportedFeatures := make([]UnsupportedFeature, 0) - unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(COMPOUND_TRIGGER_FEATURE, COMPOUND_TRIGGER_ISSUE_REASON, "", schemaAnalysisReport, false, "")) + unsupportedFeatures = append(unsupportedFeatures, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.FEATURE, COMPOUND_TRIGGER_FEATURE, COMPOUND_TRIGGER_ISSUE_REASON, "", schemaAnalysisReport, false, "")) return lo.Filter(unsupportedFeatures, func(f UnsupportedFeature, _ int) bool { return len(f.Objects) > 0 }), nil @@ -1131,13 +1147,41 @@ func fetchUnsupportedObjectTypes() ([]UnsupportedFeature, error) { unsupportedIndexes = append(unsupportedIndexes, ObjectInfo{ ObjectName: fmt.Sprintf("Index Name: %s, Index Type=%s", objectName, objectType), }) + + assessmentReport.AppendIssues(AssessmentIssue{ + Category: constants.FEATURE, + Type: "", // TODO + Name: UNSUPPORTED_INDEXES_FEATURE, + ObjectType: "INDEX", + ObjectName: fmt.Sprintf("Index Name: %s, Index Type=%s", objectName, objectType), + }) } else if objectType == VIRTUAL_COLUMN { virtualColumns = append(virtualColumns, ObjectInfo{ObjectName: objectName}) + assessmentReport.AppendIssues(AssessmentIssue{ + Category: constants.FEATURE, + Type: "", // TODO + Name: VIRTUAL_COLUMNS_FEATURE, + ObjectName: objectName, + }) } else if objectType == INHERITED_TYPE { inheritedTypes = append(inheritedTypes, ObjectInfo{ObjectName: objectName}) + assessmentReport.AppendIssues(AssessmentIssue{ + Category: constants.FEATURE, + Type: "", // TODO + Name: INHERITED_TYPES_FEATURE, + ObjectName: objectName, + }) } else if objectType == REFERENCE_PARTITION || objectType == SYSTEM_PARTITION { referenceOrTablePartitionPresent = true unsupportedPartitionTypes = append(unsupportedPartitionTypes, ObjectInfo{ObjectName: fmt.Sprintf("Table Name: %s, Partition Method: %s", objectName, objectType)}) + + assessmentReport.AppendIssues(AssessmentIssue{ + Category: constants.FEATURE, + Type: "", // TODO + Name: UNSUPPORTED_PARTITIONING_METHODS_FEATURE, + ObjectType: "TABLE", + ObjectName: fmt.Sprintf("Table Name: %s, Partition Method: %s", objectName, objectType), + }) } } @@ -1184,6 
+1228,18 @@ func fetchUnsupportedPlPgSQLObjects(schemaAnalysisReport utils.SchemaReport) []U SqlStatement: issue.SqlStatement, }) docsLink = issue.DocsLink + + assessmentReport.AppendIssues(AssessmentIssue{ + Category: constants.PLPGSQL_OBJECT, + Type: issue.Type, + Name: reason, + Impact: issue.Impact, // TODO: verify(expected already there since underlying issues are assigned) + ObjectType: issue.ObjectType, + ObjectName: issue.ObjectName, + SqlStatement: issue.SqlStatement, + DocsLink: issue.DocsLink, + MinimumVersionFixedIn: issue.MinimumVersionsFixedIn, + }) } feature := UnsupportedFeature{ FeatureName: reason, @@ -1259,6 +1315,16 @@ func fetchUnsupportedQueryConstructs() ([]utils.UnsupportedQueryConstruct, error MinimumVersionsFixedIn: issue.MinimumVersionsFixedIn, } result = append(result, uqc) + + assessmentReport.AppendIssues(AssessmentIssue{ + Category: constants.QUERY_CONSTRUCT, + Type: issue.Type, + Name: issue.Name, + Impact: issue.Impact, + SqlStatement: issue.SqlStatement, + DocsLink: issue.DocsLink, + MinimumVersionFixedIn: issue.MinimumVersionsFixedIn, + }) } } @@ -1351,6 +1417,27 @@ func fetchColumnsWithUnsupportedDataTypes() ([]utils.TableColumnsDataTypes, []ut return unsupportedDataTypes, unsupportedDataTypesForLiveMigration, unsupportedDataTypesForLiveMigrationWithFForFB, nil } +func getAssessmentIssuesForUnsupportedDatatypes(unsupportedDatatypes []utils.TableColumnsDataTypes) []AssessmentIssue { + var assessmentIssues []AssessmentIssue + for _, colInfo := range unsupportedDatatypes { + qualifiedColName := fmt.Sprintf("%s.%s.%s", colInfo.SchemaName, colInfo.TableName, colInfo.ColumnName) + issue := AssessmentIssue{ + Category: constants.DATATYPE, + CategoryDescription: GetCategoryDescription(constants.DATATYPE), + Type: colInfo.DataType, // TODO: maybe name it like "unsupported datatype - geometry" + Name: colInfo.DataType, // TODO: maybe name it like "unsupported datatype - geometry" + Impact: constants.IMPACT_LEVEL_3, + ObjectType: constants.COLUMN, + ObjectName: qualifiedColName, + DocsLink: "", // TODO + MinimumVersionFixedIn: nil, // TODO + } + assessmentIssues = append(assessmentIssues, issue) + } + + return assessmentIssues +} + /* Queries to ignore: - Collected schemas is totally different than source schema list, not containing "" @@ -1439,17 +1526,29 @@ func addMigrationCaveatsToAssessmentReport(unsupportedDataTypesForLiveMigration case POSTGRESQL: log.Infof("add migration caveats to assessment report") migrationCaveats := make([]UnsupportedFeature, 0) - migrationCaveats = append(migrationCaveats, getUnsupportedFeaturesFromSchemaAnalysisReport(ALTER_PARTITION_ADD_PK_CAVEAT_FEATURE, "", queryissue.ALTER_TABLE_ADD_PK_ON_PARTITIONED_TABLE, + migrationCaveats = append(migrationCaveats, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.MIGRATION_CAVEATS, ALTER_PARTITION_ADD_PK_CAVEAT_FEATURE, "", queryissue.ALTER_TABLE_ADD_PK_ON_PARTITIONED_TABLE, schemaAnalysisReport, true, DESCRIPTION_ADD_PK_TO_PARTITION_TABLE)) - migrationCaveats = append(migrationCaveats, getUnsupportedFeaturesFromSchemaAnalysisReport(FOREIGN_TABLE_CAVEAT_FEATURE, "", queryissue.FOREIGN_TABLE, + migrationCaveats = append(migrationCaveats, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.MIGRATION_CAVEATS, FOREIGN_TABLE_CAVEAT_FEATURE, "", queryissue.FOREIGN_TABLE, schemaAnalysisReport, false, DESCRIPTION_FOREIGN_TABLES)) - migrationCaveats = append(migrationCaveats, getUnsupportedFeaturesFromSchemaAnalysisReport(POLICIES_CAVEAT_FEATURE, "", queryissue.POLICY_WITH_ROLES, + 
migrationCaveats = append(migrationCaveats, getUnsupportedFeaturesFromSchemaAnalysisReport(constants.MIGRATION_CAVEATS, POLICIES_CAVEAT_FEATURE, "", queryissue.POLICY_WITH_ROLES, schemaAnalysisReport, false, DESCRIPTION_POLICY_ROLE_DESCRIPTION)) if len(unsupportedDataTypesForLiveMigration) > 0 { columns := make([]ObjectInfo, 0) - for _, col := range unsupportedDataTypesForLiveMigration { - columns = append(columns, ObjectInfo{ObjectName: fmt.Sprintf("%s.%s.%s (%s)", col.SchemaName, col.TableName, col.ColumnName, col.DataType)}) + for _, colInfo := range unsupportedDataTypesForLiveMigration { + columns = append(columns, ObjectInfo{ObjectName: fmt.Sprintf("%s.%s.%s (%s)", colInfo.SchemaName, colInfo.TableName, colInfo.ColumnName, colInfo.DataType)}) + + assessmentReport.AppendIssues(AssessmentIssue{ + Category: constants.MIGRATION_CAVEATS, + CategoryDescription: "", // TODO + Type: UNSUPPORTED_DATATYPES_LIVE_CAVEAT_FEATURE, // TODO add object type in type name + Name: "", // TODO + Impact: constants.IMPACT_LEVEL_1, // Caveat - we don't know the migration is offline/online; + Description: UNSUPPORTED_DATATYPES_FOR_LIVE_MIGRATION_DESCRIPTION, + ObjectType: constants.COLUMN, + ObjectName: fmt.Sprintf("%s.%s.%s", colInfo.SchemaName, colInfo.TableName, colInfo.ColumnName), + DocsLink: UNSUPPORTED_DATATYPE_LIVE_MIGRATION_DOC_LINK, + }) } if len(columns) > 0 { migrationCaveats = append(migrationCaveats, UnsupportedFeature{UNSUPPORTED_DATATYPES_LIVE_CAVEAT_FEATURE, columns, false, UNSUPPORTED_DATATYPE_LIVE_MIGRATION_DOC_LINK, UNSUPPORTED_DATATYPES_FOR_LIVE_MIGRATION_DESCRIPTION, nil}) @@ -1457,8 +1556,20 @@ func addMigrationCaveatsToAssessmentReport(unsupportedDataTypesForLiveMigration } if len(unsupportedDataTypesForLiveMigrationWithFForFB) > 0 { columns := make([]ObjectInfo, 0) - for _, col := range unsupportedDataTypesForLiveMigrationWithFForFB { - columns = append(columns, ObjectInfo{ObjectName: fmt.Sprintf("%s.%s.%s (%s)", col.SchemaName, col.TableName, col.ColumnName, col.DataType)}) + for _, colInfo := range unsupportedDataTypesForLiveMigrationWithFForFB { + columns = append(columns, ObjectInfo{ObjectName: fmt.Sprintf("%s.%s.%s (%s)", colInfo.SchemaName, colInfo.TableName, colInfo.ColumnName, colInfo.DataType)}) + + assessmentReport.AppendIssues(AssessmentIssue{ + Category: constants.MIGRATION_CAVEATS, + CategoryDescription: "", // TODO + Type: UNSUPPORTED_DATATYPES_LIVE_WITH_FF_FB_CAVEAT_FEATURE, // TODO add object type in type name + Name: "", // TODO + Impact: constants.IMPACT_LEVEL_1, + Description: UNSUPPORTED_DATATYPES_FOR_LIVE_MIGRATION_WITH_FF_FB_DESCRIPTION, + ObjectType: constants.COLUMN, + ObjectName: fmt.Sprintf("%s.%s.%s", colInfo.SchemaName, colInfo.TableName, colInfo.ColumnName), + DocsLink: UNSUPPORTED_DATATYPE_LIVE_MIGRATION_DOC_LINK, + }) } if len(columns) > 0 { migrationCaveats = append(migrationCaveats, UnsupportedFeature{UNSUPPORTED_DATATYPES_LIVE_WITH_FF_FB_CAVEAT_FEATURE, columns, false, UNSUPPORTED_DATATYPE_LIVE_MIGRATION_DOC_LINK, UNSUPPORTED_DATATYPES_FOR_LIVE_MIGRATION_WITH_FF_FB_DESCRIPTION, nil}) diff --git a/yb-voyager/cmd/assessMigrationCommandV2.go b/yb-voyager/cmd/assessMigrationCommandV2.go deleted file mode 100644 index 15c0c6bc3..000000000 --- a/yb-voyager/cmd/assessMigrationCommandV2.go +++ /dev/null @@ -1,437 +0,0 @@ -package cmd - -import ( - "fmt" - "strings" - - "github.com/samber/lo" - log "github.com/sirupsen/logrus" - "github.com/yugabyte/yb-voyager/yb-voyager/src/constants" - "github.com/yugabyte/yb-voyager/yb-voyager/src/migassessment" - 
"github.com/yugabyte/yb-voyager/yb-voyager/src/query/queryissue" - "github.com/yugabyte/yb-voyager/yb-voyager/src/query/queryparser" - "github.com/yugabyte/yb-voyager/yb-voyager/src/utils" - "github.com/yugabyte/yb-voyager/yb-voyager/src/ybversion" - "golang.org/x/exp/slices" -) - -// TODO: content of this function will move to generateAssessmentReport() -// For now this function will fetch all the issues which will be used by the migration complexity determination logic -func fetchAllAssessmentIssues() ([]AssessmentIssue, error) { - var assessmentIssues []AssessmentIssue - - assessmentIssues = append(assessmentIssues, getAssessmentReportContentFromAnalyzeSchemaV2()...) - - issues, err := fetchUnsupportedObjectTypesV2() - if err != nil { - return assessmentIssues, fmt.Errorf("failed to fetch unsupported object type issues: %w", err) - } - assessmentIssues = append(assessmentIssues, issues...) - - if utils.GetEnvAsBool("REPORT_UNSUPPORTED_QUERY_CONSTRUCTS", true) { - issues, err := fetchUnsupportedQueryConstructsV2() - if err != nil { - return assessmentIssues, fmt.Errorf("failed to fetch unsupported queries on YugabyteDB: %w", err) - } - assessmentIssues = append(assessmentIssues, issues...) - } - - unsupportedDataTypes, unsupportedDataTypesForLiveMigration, unsupportedDataTypesForLiveMigrationWithFForFB, err := fetchColumnsWithUnsupportedDataTypes() - if err != nil { - return assessmentIssues, fmt.Errorf("failed to fetch columns with unsupported data types: %w", err) - } - - assessmentIssues = append(assessmentIssues, getAssessmentIssuesForUnsupportedDatatypes(unsupportedDataTypes)...) - - assessmentIssues = append(assessmentIssues, fetchMigrationCaveatAssessmentIssues(unsupportedDataTypesForLiveMigration, unsupportedDataTypesForLiveMigrationWithFForFB)...) - - return assessmentIssues, nil -} - -// TODO: replacement of getAssessmentReportContentFromAnalyzeSchema() -func getAssessmentReportContentFromAnalyzeSchemaV2() []AssessmentIssue { - /* - Here we are generating analyze schema report which converts issue instance to analyze schema issue - Then in assessment codepath we extract the required information from analyze schema issue which could have been done directly from issue instance(TODO) - - But current Limitation is analyze schema currently uses regexp etc to detect some issues(not using parser). - */ - schemaAnalysisReport := analyzeSchemaInternal(&source, true) - assessmentReport.MigrationComplexity = schemaAnalysisReport.MigrationComplexity - assessmentReport.SchemaSummary = schemaAnalysisReport.SchemaSummary - assessmentReport.SchemaSummary.Description = lo.Ternary(source.DBType == ORACLE, SCHEMA_SUMMARY_DESCRIPTION_ORACLE, SCHEMA_SUMMARY_DESCRIPTION) - - var assessmentIssues []AssessmentIssue - switch source.DBType { - case ORACLE: - assessmentIssues = append(assessmentIssues, fetchUnsupportedOracleFeaturesFromSchemaReportV2(schemaAnalysisReport)...) - case POSTGRESQL: - assessmentIssues = append(assessmentIssues, fetchUnsupportedPGFeaturesFromSchemaReportV2(schemaAnalysisReport)...) - default: - panic(fmt.Sprintf("unsupported source db type %q", source.DBType)) - } - - // Ques: Do we still need this and REPORT_UNSUPPORTED_QUERY_CONSTRUCTS env var - if utils.GetEnvAsBool("REPORT_UNSUPPORTED_PLPGSQL_OBJECTS", true) { - assessmentIssues = append(assessmentIssues, fetchUnsupportedPlPgSQLObjectsV2(schemaAnalysisReport)...) 
- } - - return assessmentIssues -} - -// TODO: will replace fetchUnsupportedOracleFeaturesFromSchemaReport() -func fetchUnsupportedOracleFeaturesFromSchemaReportV2(schemaAnalysisReport utils.SchemaReport) []AssessmentIssue { - log.Infof("fetching assessment issues of feature category for Oracle...") - var assessmentIssues []AssessmentIssue - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, "", COMPOUND_TRIGGER_ISSUE_REASON, schemaAnalysisReport, "")...) - return assessmentIssues -} - -// TODO: will replace fetchUnsupportedPGFeaturesFromSchemaReport() -func fetchUnsupportedPGFeaturesFromSchemaReportV2(schemaAnalysisReport utils.SchemaReport) []AssessmentIssue { - log.Infof("fetching assessment issues of feature category for PG...") - - var assessmentIssues []AssessmentIssue - for _, indexMethod := range queryissue.UnsupportedIndexMethods { - displayIndexMethod := strings.ToUpper(indexMethod) - reason := fmt.Sprintf(INDEX_METHOD_ISSUE_REASON, displayIndexMethod) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, "", reason, schemaAnalysisReport, "")...) - } - - assessmentIssues = append(assessmentIssues, getIndexesOnComplexTypeUnsupportedFeatureV2(schemaAnalysisReport, queryissue.UnsupportedIndexDatatypes)...) - - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.CONSTRAINT_TRIGGER, "", schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.INHERITANCE, "", schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.STORED_GENERATED_COLUMNS, "", schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, "", CONVERSION_ISSUE_REASON, schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.MULTI_COLUMN_GIN_INDEX, "", schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.ALTER_TABLE_SET_COLUMN_ATTRIBUTE, "", schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.ALTER_TABLE_DISABLE_RULE, "", schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.ALTER_TABLE_CLUSTER_ON, "", schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.STORAGE_PARAMETER, "", schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, "", UNSUPPORTED_EXTENSION_ISSUE, schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.EXCLUSION_CONSTRAINTS, "", schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.DEFERRABLE_CONSTRAINTS, "", schemaAnalysisReport, "")...) 
- assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, "", VIEW_CHECK_OPTION_ISSUE, schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.PK_UK_ON_COMPLEX_DATATYPE, "", schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.UNLOGGED_TABLE, "", schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.REFERENCING_CLAUSE_IN_TRIGGER, "", schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.BEFORE_ROW_TRIGGER_ON_PARTITIONED_TABLE, "", schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.ADVISORY_LOCKS, "", schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.XML_FUNCTIONS, "", schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.SYSTEM_COLUMNS, "", schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.LARGE_OBJECT_FUNCTIONS, "", schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.REGEX_FUNCTIONS, "", schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.FETCH_WITH_TIES, "", schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.JSON_QUERY_FUNCTION, "", schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.JSON_CONSTRUCTOR_FUNCTION, "", schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.AGGREGATE_FUNCTION, "", schemaAnalysisReport, "")...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, queryissue.SECURITY_INVOKER_VIEWS, "", schemaAnalysisReport, "")...) - - return assessmentIssues -} - -func getIndexesOnComplexTypeUnsupportedFeatureV2(schemaAnalysisReport utils.SchemaReport, unsupportedIndexDatatypes []string) []AssessmentIssue { - var assessmentIssues []AssessmentIssue - // TODO: include MinimumVersionsFixedIn - - unsupportedIndexDatatypes = append(unsupportedIndexDatatypes, "array") // adding it here only as we know issue form analyze will come with type - unsupportedIndexDatatypes = append(unsupportedIndexDatatypes, "user_defined_type") // adding it here as we UDTs will come with this type. 
- for _, unsupportedType := range unsupportedIndexDatatypes { - // formattedObject.ObjectName = fmt.Sprintf("%s: %s", strings.ToUpper(unsupportedType), object.ObjectName) - issueReason := fmt.Sprintf(ISSUE_INDEX_WITH_COMPLEX_DATATYPES, unsupportedType) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.FEATURE, "", issueReason, schemaAnalysisReport, "")...) - } - return assessmentIssues -} - -// TODO: replacement of fetchUnsupportedObjectTypes() -func fetchUnsupportedObjectTypesV2() ([]AssessmentIssue, error) { - if source.DBType != ORACLE { - return nil, nil - } - - query := fmt.Sprintf(`SELECT schema_name, object_name, object_type FROM %s`, migassessment.OBJECT_TYPE_MAPPING) - rows, err := assessmentDB.Query(query) - if err != nil { - return nil, fmt.Errorf("error querying-%s: %w", query, err) - } - defer func() { - closeErr := rows.Close() - if closeErr != nil { - log.Warnf("error closing rows while fetching object type mapping metadata: %v", err) - } - }() - - var assessmentIssues []AssessmentIssue - for rows.Next() { - var schemaName, objectName, objectType string - err = rows.Scan(&schemaName, &objectName, &objectType) - if err != nil { - return nil, fmt.Errorf("error scanning rows:%w", err) - } - - // For these oracle issues defining Impact not required, since oracle migration complexity comes from ora2pg - switch { - case slices.Contains(OracleUnsupportedIndexTypes, objectType): - assessmentIssues = append(assessmentIssues, AssessmentIssue{ - Category: constants.FEATURE, - Type: "", // TODO - Name: UNSUPPORTED_INDEXES_FEATURE, - ObjectType: "INDEX", - ObjectName: fmt.Sprintf("Index Name: %s, Index Type=%s", objectName, objectType), - }) - case objectType == VIRTUAL_COLUMN: - assessmentIssues = append(assessmentIssues, AssessmentIssue{ - Category: constants.FEATURE, - Type: "", // TODO - Name: VIRTUAL_COLUMNS_FEATURE, - ObjectName: objectName, - }) - case objectType == INHERITED_TYPE: - assessmentIssues = append(assessmentIssues, AssessmentIssue{ - Category: constants.FEATURE, - Type: "", // TODO - Name: INHERITED_TYPES_FEATURE, - ObjectName: objectName, - }) - case objectType == REFERENCE_PARTITION || objectType == SYSTEM_PARTITION: - referenceOrTablePartitionPresent = true - assessmentIssues = append(assessmentIssues, AssessmentIssue{ - Category: constants.FEATURE, - Type: "", // TODO - Name: UNSUPPORTED_PARTITIONING_METHODS_FEATURE, - ObjectType: "TABLE", - ObjectName: fmt.Sprintf("Table Name: %s, Partition Method: %s", objectName, objectType), - }) - } - } - - return assessmentIssues, nil -} - -// // TODO: replacement of fetchUnsupportedPlPgSQLObjects() -func fetchUnsupportedPlPgSQLObjectsV2(schemaAnalysisReport utils.SchemaReport) []AssessmentIssue { - if source.DBType != POSTGRESQL { - return nil - } - - plpgsqlIssues := lo.Filter(schemaAnalysisReport.Issues, func(issue utils.AnalyzeSchemaIssue, _ int) bool { - return issue.IssueType == UNSUPPORTED_PLPGSQL_OBJECTS - }) - groupedPlpgsqlIssuesByReason := lo.GroupBy(plpgsqlIssues, func(issue utils.AnalyzeSchemaIssue) string { - return issue.Reason - }) - - var assessmentIssues []AssessmentIssue - for reason, issues := range groupedPlpgsqlIssuesByReason { - var minVersionsFixedIn map[string]*ybversion.YBVersion - var minVersionsFixedInSet bool - - for _, issue := range issues { - if !minVersionsFixedInSet { - minVersionsFixedIn = issue.MinimumVersionsFixedIn - minVersionsFixedInSet = true - } - if !areMinVersionsFixedInEqual(minVersionsFixedIn, issue.MinimumVersionsFixedIn) { - 
utils.ErrExit("Issues belonging to UnsupportedFeature %s have different minimum versions fixed in: %v, %v", reason, minVersionsFixedIn, issue.MinimumVersionsFixedIn) - } - - assessmentIssues = append(assessmentIssues, AssessmentIssue{ - Category: constants.PLPGSQL_OBJECT, - Type: issue.Type, - Name: reason, - Impact: issue.Impact, // TODO: verify(expected already there since underlying issues are assigned) - ObjectType: issue.ObjectType, - ObjectName: issue.ObjectName, - SqlStatement: issue.SqlStatement, - DocsLink: issue.DocsLink, - MinimumVersionFixedIn: issue.MinimumVersionsFixedIn, - }) - } - } - - return assessmentIssues -} - -// Q: do we no need of displayDDLInHTML in this approach? DDL can always be there for issues in the table. -func getAssessmentIssuesFromSchemaAnalysisReport(category string, issueType string, issueReason string, schemaAnalysisReport utils.SchemaReport, issueDescription string) []AssessmentIssue { - log.Infof("filtering issues for type: %s", issueType) - var issues []AssessmentIssue - var minVersionsFixedIn map[string]*ybversion.YBVersion - var minVersionsFixedInSet bool - for _, analyzeIssue := range schemaAnalysisReport.Issues { - if !slices.Contains([]string{UNSUPPORTED_FEATURES, MIGRATION_CAVEATS}, analyzeIssue.IssueType) { - continue - } - - issueMatched := lo.Ternary[bool](issueType != "", issueType == analyzeIssue.Type, strings.Contains(analyzeIssue.Reason, issueReason)) - if issueMatched { - if !minVersionsFixedInSet { - minVersionsFixedIn = analyzeIssue.MinimumVersionsFixedIn - minVersionsFixedInSet = true - } - if !areMinVersionsFixedInEqual(minVersionsFixedIn, analyzeIssue.MinimumVersionsFixedIn) { - utils.ErrExit("Issues belonging to type %s have different minimum versions fixed in: %v, %v", analyzeIssue.Type, minVersionsFixedIn, analyzeIssue.MinimumVersionsFixedIn) - } - - issues = append(issues, AssessmentIssue{ - Category: category, - CategoryDescription: GetCategoryDescription(category), - Type: analyzeIssue.Type, - Name: analyzeIssue.Reason, // in convertIssueInstanceToAnalyzeIssue() we assign IssueType to Reason field - Description: issueDescription, // TODO: verify - Impact: analyzeIssue.Impact, - ObjectType: analyzeIssue.ObjectType, - ObjectName: analyzeIssue.ObjectName, - SqlStatement: analyzeIssue.SqlStatement, - DocsLink: analyzeIssue.DocsLink, - MinimumVersionFixedIn: minVersionsFixedIn, - }) - } - } - - return issues -} - -// TODO: soon to replace fetchUnsupportedQueryConstructs() -func fetchUnsupportedQueryConstructsV2() ([]AssessmentIssue, error) { - if source.DBType != POSTGRESQL { - return nil, nil - } - - query := fmt.Sprintf("SELECT DISTINCT query from %s", migassessment.DB_QUERIES_SUMMARY) - rows, err := assessmentDB.Query(query) - if err != nil { - return nil, fmt.Errorf("error querying=%s on assessmentDB: %w", query, err) - } - defer func() { - closeErr := rows.Close() - if closeErr != nil { - log.Warnf("error closing rows while fetching database queries summary metadata: %v", err) - } - }() - - var executedQueries []string - for rows.Next() { - var executedQuery string - err := rows.Scan(&executedQuery) - if err != nil { - return nil, fmt.Errorf("error scanning rows: %w", err) - } - executedQueries = append(executedQueries, executedQuery) - } - - if len(executedQueries) == 0 { - log.Infof("queries info not present in the assessment metadata for detecting unsupported query constructs") - return nil, nil - } - - var assessmentIssues []AssessmentIssue - for i := 0; i < len(executedQueries); i++ { - query := executedQueries[i] - 
log.Debugf("fetching unsupported query constructs for query - [%s]", query) - collectedSchemaList, err := queryparser.GetSchemaUsed(query) - if err != nil { // no need to error out if failed to get schemas for a query - log.Errorf("failed to get schemas used for query [%s]: %v", query, err) - continue - } - - log.Infof("collected schema list %v(len=%d) for query [%s]", collectedSchemaList, len(collectedSchemaList), query) - if !considerQueryForIssueDetection(collectedSchemaList) { - log.Infof("ignoring query due to difference in collected schema list %v(len=%d) vs source schema list %v(len=%d)", - collectedSchemaList, len(collectedSchemaList), source.GetSchemaList(), len(source.GetSchemaList())) - continue - } - - issues, err := parserIssueDetector.GetDMLIssues(query, targetDbVersion) - if err != nil { - log.Errorf("failed while trying to fetch query issues in query - [%s]: %v", - query, err) - } - - for _, issue := range issues { - issue := AssessmentIssue{ - Category: constants.QUERY_CONSTRUCT, - Type: issue.Type, - Name: issue.Name, - Impact: issue.Impact, - SqlStatement: issue.SqlStatement, - DocsLink: issue.DocsLink, - MinimumVersionFixedIn: issue.MinimumVersionsFixedIn, - } - assessmentIssues = append(assessmentIssues, issue) - } - } - - return assessmentIssues, nil -} - -func getAssessmentIssuesForUnsupportedDatatypes(unsupportedDatatypes []utils.TableColumnsDataTypes) []AssessmentIssue { - var assessmentIssues []AssessmentIssue - for _, colInfo := range unsupportedDatatypes { - qualifiedColName := fmt.Sprintf("%s.%s.%s", colInfo.SchemaName, colInfo.TableName, colInfo.ColumnName) - issue := AssessmentIssue{ - Category: constants.DATATYPE, - CategoryDescription: GetCategoryDescription(constants.DATATYPE), - Type: colInfo.DataType, // TODO: maybe name it like "unsupported datatype - geometry" - Name: colInfo.DataType, // TODO: maybe name it like "unsupported datatype - geometry" - Impact: constants.IMPACT_LEVEL_3, - ObjectType: constants.COLUMN, - ObjectName: qualifiedColName, - DocsLink: "", // TODO - MinimumVersionFixedIn: nil, // TODO - } - assessmentIssues = append(assessmentIssues, issue) - } - - return assessmentIssues -} - -// TODO: soon to replace addMigrationCaveatsToAssessmentReport() -func fetchMigrationCaveatAssessmentIssues(unsupportedDataTypesForLiveMigration []utils.TableColumnsDataTypes, unsupportedDataTypesForLiveMigrationWithFForFB []utils.TableColumnsDataTypes) []AssessmentIssue { - var assessmentIssues []AssessmentIssue - switch source.DBType { - case POSTGRESQL: - log.Infof("fetching migration caveat category assessment issues") - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.MIGRATION_CAVEATS, queryissue.ALTER_TABLE_ADD_PK_ON_PARTITIONED_TABLE, "", schemaAnalysisReport, DESCRIPTION_ADD_PK_TO_PARTITION_TABLE)...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.MIGRATION_CAVEATS, queryissue.FOREIGN_TABLE, "", schemaAnalysisReport, DESCRIPTION_FOREIGN_TABLES)...) - assessmentIssues = append(assessmentIssues, getAssessmentIssuesFromSchemaAnalysisReport(constants.MIGRATION_CAVEATS, queryissue.POLICY_WITH_ROLES, "", schemaAnalysisReport, DESCRIPTION_POLICY_ROLE_DESCRIPTION)...) 
- - if len(unsupportedDataTypesForLiveMigration) > 0 { - for _, colInfo := range unsupportedDataTypesForLiveMigration { - issue := AssessmentIssue{ - Category: constants.MIGRATION_CAVEATS, - CategoryDescription: "", // TODO - Type: UNSUPPORTED_DATATYPES_LIVE_CAVEAT_FEATURE, // TODO add object type in type name - Name: "", // TODO - Impact: constants.IMPACT_LEVEL_1, // Caveat - we don't know the migration is offline/online; - Description: UNSUPPORTED_DATATYPES_FOR_LIVE_MIGRATION_DESCRIPTION, - ObjectType: constants.COLUMN, - ObjectName: fmt.Sprintf("%s.%s.%s", colInfo.SchemaName, colInfo.TableName, colInfo.ColumnName), - DocsLink: UNSUPPORTED_DATATYPE_LIVE_MIGRATION_DOC_LINK, - } - assessmentIssues = append(assessmentIssues, issue) - } - } - - if len(unsupportedDataTypesForLiveMigrationWithFForFB) > 0 { - for _, colInfo := range unsupportedDataTypesForLiveMigrationWithFForFB { - issue := AssessmentIssue{ - Category: constants.MIGRATION_CAVEATS, - CategoryDescription: "", // TODO - Type: UNSUPPORTED_DATATYPES_LIVE_WITH_FF_FB_CAVEAT_FEATURE, // TODO add object type in type name - Name: "", // TODO - Impact: constants.IMPACT_LEVEL_1, - Description: UNSUPPORTED_DATATYPES_FOR_LIVE_MIGRATION_WITH_FF_FB_DESCRIPTION, - ObjectType: constants.COLUMN, - ObjectName: fmt.Sprintf("%s.%s.%s", colInfo.SchemaName, colInfo.TableName, colInfo.ColumnName), - DocsLink: UNSUPPORTED_DATATYPE_LIVE_MIGRATION_DOC_LINK, - } - assessmentIssues = append(assessmentIssues, issue) - } - } - } - - return assessmentIssues -} diff --git a/yb-voyager/cmd/common.go b/yb-voyager/cmd/common.go index 064d80262..2542c14d1 100644 --- a/yb-voyager/cmd/common.go +++ b/yb-voyager/cmd/common.go @@ -1315,6 +1315,10 @@ func ParseJSONToAssessmentReport(reportPath string) (*AssessmentReport, error) { return &report, nil } +func (ar *AssessmentReport) AppendIssues(issues ...AssessmentIssue) { + ar.Issues = append(ar.Issues, issues...) +} + func (ar *AssessmentReport) GetShardedTablesRecommendation() ([]string, error) { if ar.Sizing == nil { return nil, fmt.Errorf("sizing report is null, can't fetch sharded tables")
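Illustrative sketch (not part of the patch): how the new variadic AppendIssues helper on AssessmentReport is intended to be called, once with a single issue and once by spreading a slice, mirroring the call sites introduced above. The types below are simplified stand-ins for the real structs in yb-voyager/cmd, and the Category/Name values are placeholders rather than the actual constants.FEATURE/constants.DATATYPE values.

package main

import "fmt"

// Simplified stand-in for the AssessmentIssue struct in cmd; only two fields are kept for the example.
type AssessmentIssue struct {
	Category string
	Name     string
}

// Simplified stand-in for the AssessmentReport struct in cmd/common.go.
type AssessmentReport struct {
	Issues []AssessmentIssue
}

// Same shape as the helper added in this patch: a variadic append onto the report's Issues slice.
func (ar *AssessmentReport) AppendIssues(issues ...AssessmentIssue) {
	ar.Issues = append(ar.Issues, issues...)
}

func main() {
	var report AssessmentReport

	// Single-issue call, as in the per-analyze-issue call sites.
	report.AppendIssues(AssessmentIssue{Category: "feature", Name: "GIST indexes"})

	// Slice-spread call, as in AppendIssues(getAssessmentIssuesForUnsupportedDatatypes(unsupportedDataTypes)...).
	datatypeIssues := []AssessmentIssue{
		{Category: "datatype", Name: "geometry"},
		{Category: "datatype", Name: "xml"},
	}
	report.AppendIssues(datatypeIssues...)

	fmt.Printf("number of assessment issues detected: %d\n", len(report.Issues))
}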