diff --git a/migtests/tests/pg/assessment-report-test/expectedAssessmentReport.json b/migtests/tests/pg/assessment-report-test/expectedAssessmentReport.json
index 1057ad9fd..5a861d641 100644
--- a/migtests/tests/pg/assessment-report-test/expectedAssessmentReport.json
+++ b/migtests/tests/pg/assessment-report-test/expectedAssessmentReport.json
@@ -38,15 +38,15 @@
 		},
 		{
 			"ObjectType": "SEQUENCE",
-			"TotalCount": 28,
+			"TotalCount": 29,
 			"InvalidCount": 0,
-			"ObjectNames": "public.ordersentry_order_id_seq, public.\"Case_Sensitive_Columns_id_seq\", public.\"Mixed_Case_Table_Name_Test_id_seq\", public.\"Recipients_id_seq\", public.\"WITH_id_seq\", public.employees_employee_id_seq, public.employees2_id_seq, public.ext_test_id_seq, public.mixed_data_types_table1_id_seq, public.mixed_data_types_table2_id_seq, public.orders2_id_seq, public.parent_table_id_seq, public.with_example1_id_seq, public.with_example2_id_seq, schema2.\"Case_Sensitive_Columns_id_seq\", schema2.\"Mixed_Case_Table_Name_Test_id_seq\", schema2.\"Recipients_id_seq\", schema2.\"WITH_id_seq\", schema2.employees2_id_seq, schema2.ext_test_id_seq, schema2.mixed_data_types_table1_id_seq, schema2.mixed_data_types_table2_id_seq, schema2.orders2_id_seq, schema2.parent_table_id_seq, schema2.with_example1_id_seq, schema2.with_example2_id_seq, test_views.view_table1_id_seq, test_views.view_table2_id_seq"
+			"ObjectNames": "public.\"Case_Sensitive_Columns_id_seq\", public.\"Mixed_Case_Table_Name_Test_id_seq\", public.\"Recipients_id_seq\", public.\"WITH_id_seq\", public.employees2_id_seq, public.employees3_id_seq, public.employees_employee_id_seq, public.ext_test_id_seq, public.mixed_data_types_table1_id_seq, public.mixed_data_types_table2_id_seq, public.orders2_id_seq, public.ordersentry_order_id_seq, public.parent_table_id_seq, public.with_example1_id_seq, public.with_example2_id_seq, schema2.\"Case_Sensitive_Columns_id_seq\", schema2.\"Mixed_Case_Table_Name_Test_id_seq\", schema2.\"Recipients_id_seq\", schema2.\"WITH_id_seq\", schema2.employees2_id_seq, schema2.ext_test_id_seq, schema2.mixed_data_types_table1_id_seq, schema2.mixed_data_types_table2_id_seq, schema2.orders2_id_seq, schema2.parent_table_id_seq, schema2.with_example1_id_seq, schema2.with_example2_id_seq, test_views.view_table1_id_seq, test_views.view_table2_id_seq"
 		},
 		{
 			"ObjectType": "TABLE",
-			"TotalCount": 67,
+			"TotalCount": 68,
 			"InvalidCount": 23,
-			"ObjectNames": "public.ordersentry, public.library_nested, public.orders_lateral, public.\"Case_Sensitive_Columns\", public.\"Mixed_Case_Table_Name_Test\", public.\"Recipients\", public.\"WITH\", public.audit, public.sales_region, public.boston, public.c, public.parent_table, public.child_table, public.citext_type, public.combined_tbl, public.documents, public.employees2, public.ext_test, public.foo, public.inet_type, public.london, public.mixed_data_types_table1, public.mixed_data_types_table2, public.orders, public.orders2, public.products, public.session_log, public.session_log1, public.session_log2, public.sydney, public.test_exclude_basic, public.test_jsonb, public.test_xml_type, public.ts_query_table, public.tt, public.with_example1, public.with_example2, schema2.\"Case_Sensitive_Columns\", schema2.\"Mixed_Case_Table_Name_Test\", schema2.\"Recipients\", schema2.\"WITH\", schema2.audit, schema2.sales_region, schema2.boston, schema2.c, schema2.parent_table, schema2.child_table, schema2.employees2, schema2.ext_test, schema2.foo, schema2.london, schema2.mixed_data_types_table1, schema2.mixed_data_types_table2, schema2.orders, schema2.orders2, schema2.products, schema2.session_log, schema2.session_log1, schema2.session_log2, schema2.sydney, schema2.test_xml_type, schema2.tt, schema2.with_example1, schema2.with_example2, test_views.view_table1, test_views.view_table2, public.employees"
+			"ObjectNames": "public.employees3, public.ordersentry, public.library_nested, public.orders_lateral, public.\"Case_Sensitive_Columns\", public.\"Mixed_Case_Table_Name_Test\", public.\"Recipients\", public.\"WITH\", public.audit, public.sales_region, public.boston, public.c, public.parent_table, public.child_table, public.citext_type, public.combined_tbl, public.documents, public.employees2, public.ext_test, public.foo, public.inet_type, public.london, public.mixed_data_types_table1, public.mixed_data_types_table2, public.orders, public.orders2, public.products, public.session_log, public.session_log1, public.session_log2, public.sydney, public.test_exclude_basic, public.test_jsonb, public.test_xml_type, public.ts_query_table, public.tt, public.with_example1, public.with_example2, schema2.\"Case_Sensitive_Columns\", schema2.\"Mixed_Case_Table_Name_Test\", schema2.\"Recipients\", schema2.\"WITH\", schema2.audit, schema2.sales_region, schema2.boston, schema2.c, schema2.parent_table, schema2.child_table, schema2.employees2, schema2.ext_test, schema2.foo, schema2.london, schema2.mixed_data_types_table1, schema2.mixed_data_types_table2, schema2.orders, schema2.orders2, schema2.products, schema2.session_log, schema2.session_log1, schema2.session_log2, schema2.sydney, schema2.test_xml_type, schema2.tt, schema2.with_example1, schema2.with_example2, test_views.view_table1, test_views.view_table2, public.employees"
 		},
 		{
 			"ObjectType": "INDEX",
@@ -170,9 +170,10 @@
 			"test_views.view_table1",
 			"public.library_nested",
 			"public.orders_lateral",
-			"public.employees"
+			"public.employees",
+			"public.employees3"
 		],
-		"ColocatedReasoning": "Recommended instance type with 4 vCPU and 16 GiB memory could fit 73 objects (65 tables/materialized views and 8 explicit/implicit indexes) with 0.00 MB size and throughput requirement of 0 reads/sec and 0 writes/sec as colocated. Rest 28 objects (5 tables/materialized views and 23 explicit/implicit indexes) with 0.00 MB size and throughput requirement of 0 reads/sec and 0 writes/sec need to be migrated as range partitioned tables. Non leaf partition tables/indexes and unsupported tables/indexes were not considered.",
+		"ColocatedReasoning": "Recommended instance type with 4 vCPU and 16 GiB memory could fit 74 objects (66 tables/materialized views and 8 explicit/implicit indexes) with 0.00 MB size and throughput requirement of 0 reads/sec and 0 writes/sec as colocated. Rest 28 objects (5 tables/materialized views and 23 explicit/implicit indexes) with 0.00 MB size and throughput requirement of 0 reads/sec and 0 writes/sec need to be migrated as range partitioned tables. Non leaf partition tables/indexes and unsupported tables/indexes were not considered.",
 		"ShardedTables": [
 			"public.combined_tbl",
 			"public.citext_type",
@@ -1983,6 +1984,20 @@
 			"ObjectType": "",
 			"ParentTableName": "schema2.mixed_data_types_table1",
 			"SizeInBytes": 8192
+		},
+		{
+			"SchemaName": "public",
+			"ObjectName": "employees3",
+			"RowCount": 2,
+			"ColumnCount": 3,
+			"Reads": 0,
+			"Writes": 2,
+			"ReadsPerSecond": 0,
+			"WritesPerSecond": 0,
+			"IsIndex": false,
+			"ObjectType": "",
+			"ParentTableName": null,
+			"SizeInBytes": 8192
 		}
 	],
 	"Notes": [
@@ -2182,6 +2197,12 @@
 			"Query": "SELECT lo_create($1)",
 			"DocsLink": "",
 			"MinimumVersionsFixedIn": null
+		},
+		{
+			"ConstructTypeName": "COPY FROM ... WHERE",
+			"Query": "COPY employees3 (id, name, age)\nFROM STDIN WITH (FORMAT csv)\nWHERE age \u003e 30",
+			"DocsLink": "",
+			"MinimumVersionsFixedIn": null
 		}
 	],
 	"UnsupportedPlPgSqlObjects": [
@@ -2231,4 +2252,4 @@
 			"MinimumVersionsFixedIn": null
 		}
 	]
-}
\ No newline at end of file
+}
diff --git a/migtests/tests/pg/assessment-report-test/unsupported_query_constructs.sql b/migtests/tests/pg/assessment-report-test/unsupported_query_constructs.sql
index cc7251039..284c8f7d0 100644
--- a/migtests/tests/pg/assessment-report-test/unsupported_query_constructs.sql
+++ b/migtests/tests/pg/assessment-report-test/unsupported_query_constructs.sql
@@ -138,4 +138,32 @@ FROM
 ) AS items;
 
 
-SELECT lo_create('32142');
\ No newline at end of file
+SELECT lo_create('32142');
+
+-- Unsupported COPY constructs
+
+CREATE TABLE IF NOT EXISTS employees3 (
+    id SERIAL PRIMARY KEY,
+    name TEXT NOT NULL,
+    age INT NOT NULL
+);
+
+
+-- COPY FROM with WHERE clause
+COPY employees3 (id, name, age)
+FROM STDIN WITH (FORMAT csv)
+WHERE age > 30;
+1,John Smith,25
+2,Jane Doe,34
+3,Bob Johnson,31
+\.
+
+-- This can be uncommented when we start using PG 17 or later in the tests
+-- -- COPY with ON_ERROR clause
+-- COPY employees (id, name, age)
+-- FROM STDIN WITH (FORMAT csv, ON_ERROR IGNORE );
+-- 4,Adam Smith,22
+-- 5,John Doe,34
+-- 6,Ron Johnson,31
+-- \.
+
diff --git a/yb-voyager/src/query/queryissue/constants.go b/yb-voyager/src/query/queryissue/constants.go
index f0f86239a..8d319777e 100644
--- a/yb-voyager/src/query/queryissue/constants.go
+++ b/yb-voyager/src/query/queryissue/constants.go
@@ -70,6 +70,8 @@ const (
 	XML_FUNCTIONS_NAME = "XML Functions"
 
 	REGEX_FUNCTIONS = "REGEX_FUNCTIONS"
+	COPY_FROM_WHERE = "COPY FROM ... WHERE"
+	COPY_ON_ERROR   = "COPY ... ON_ERROR"
 )
 
 // Object types
diff --git a/yb-voyager/src/query/queryissue/detectors.go b/yb-voyager/src/query/queryissue/detectors.go
index 8a57c581d..e6ec4c4cc 100644
--- a/yb-voyager/src/query/queryissue/detectors.go
+++ b/yb-voyager/src/query/queryissue/detectors.go
@@ -16,6 +16,8 @@ limitations under the License.
 package queryissue
 
 import (
+	"slices"
+
 	mapset "github.com/deckarep/golang-set/v2"
 	log "github.com/sirupsen/logrus"
 	"google.golang.org/protobuf/reflect/protoreflect"
@@ -204,6 +206,55 @@ func (d *RangeTableFuncDetector) GetIssues() []QueryIssue {
 	return issues
 }
 
+type CopyCommandUnsupportedConstructsDetector struct {
+	query                          string
+	copyFromWhereConstructDetected bool
+	copyOnErrorConstructDetected   bool
+}
+
+func NewCopyCommandUnsupportedConstructsDetector(query string) *CopyCommandUnsupportedConstructsDetector {
+	return &CopyCommandUnsupportedConstructsDetector{
+		query: query,
+	}
+}
+
+// Detect checks whether the COPY command uses unsupported syntax, i.e. COPY FROM ... WHERE and COPY ... ON_ERROR
+func (d *CopyCommandUnsupportedConstructsDetector) Detect(msg protoreflect.Message) error {
+	// Check if the message is a COPY statement
+	if msg.Descriptor().FullName() != queryparser.PG_QUERY_COPYSTSMT_NODE {
+		return nil // Not a COPY statement, nothing to detect
+	}
+
+	// Check for COPY FROM ... WHERE clause
+	fromField := queryparser.GetBoolField(msg, "is_from")
+	whereField := queryparser.GetMessageField(msg, "where_clause")
+	if fromField && whereField != nil {
+		d.copyFromWhereConstructDetected = true
+	}
+
+	// Check for COPY ... ON_ERROR clause
+	defNames, err := queryparser.TraverseAndExtractDefNamesFromDefElem(msg)
+	if err != nil {
+		log.Errorf("error extracting defnames from COPY statement: %v", err)
+	}
+	if slices.Contains(defNames, "on_error") {
+		d.copyOnErrorConstructDetected = true
+	}
+
+	return nil
+}
+
+func (d *CopyCommandUnsupportedConstructsDetector) GetIssues() []QueryIssue {
+	var issues []QueryIssue
+	if d.copyFromWhereConstructDetected {
+		issues = append(issues, NewCopyFromWhereIssue(DML_QUERY_OBJECT_TYPE, "", d.query))
+	}
+	if d.copyOnErrorConstructDetected {
+		issues = append(issues, NewCopyOnErrorIssue(DML_QUERY_OBJECT_TYPE, "", d.query))
+	}
+	return issues
+}
+
 type JsonConstructorFuncDetector struct {
 	query                                       string
 	unsupportedJsonConstructorFunctionsDetected mapset.Set[string]
 }
@@ -253,19 +304,19 @@ func NewJsonQueryFunctionDetector(query string) *JsonQueryFunctionDetector {
 func (j *JsonQueryFunctionDetector) Detect(msg protoreflect.Message) error {
 	if queryparser.GetMsgFullName(msg) == queryparser.PG_QUERY_JSON_TABLE_NODE {
 		/*
-			SELECT * FROM json_table(
-				'[{"a":10,"b":20},{"a":30,"b":40}]'::jsonb,
-				'$[*]'
-				COLUMNS (
-					column_a int4 path '$.a',
-					column_b int4 path '$.b'
-				)
-			);
-			stmts:{stmt:{select_stmt:{target_list:{res_target:{val:{column_ref:{fields:{a_star:{}} location:530}} location:530}}
-			from_clause:{json_table:{context_item:{raw_expr:{type_cast:{arg:{a_const:{sval:{sval:"[{\"a\":10,\"b\":20},{\"a\":30,\"b\":40}]"}
-			location:553}} type_name:{names:{string:{sval:"jsonb"}} ..... name_location:-1 location:601}
-			columns:{json_table_column:{coltype:JTC_REGULAR name:"column_a" type_name:{names:{string:{sval:"int4"}} typemod:-1 location:639}
-			pathspec:{string:{a_const:{sval:{sval:"$.a"} location:649}} name_location:-1 location:649} ...
+		SELECT * FROM json_table(
+			'[{"a":10,"b":20},{"a":30,"b":40}]'::jsonb,
+			'$[*]'
+			COLUMNS (
+				column_a int4 path '$.a',
+				column_b int4 path '$.b'
+			)
+		);
+		stmts:{stmt:{select_stmt:{target_list:{res_target:{val:{column_ref:{fields:{a_star:{}} location:530}} location:530}}
+		from_clause:{json_table:{context_item:{raw_expr:{type_cast:{arg:{a_const:{sval:{sval:"[{\"a\":10,\"b\":20},{\"a\":30,\"b\":40}]"}
+		location:553}} type_name:{names:{string:{sval:"jsonb"}} ..... name_location:-1 location:601}
+		columns:{json_table_column:{coltype:JTC_REGULAR name:"column_a" type_name:{names:{string:{sval:"int4"}} typemod:-1 location:639}
+		pathspec:{string:{a_const:{sval:{sval:"$.a"} location:649}} name_location:-1 location:649} ...
 		*/
 		j.unsupportedJsonQueryFunctionsDetected.Add(JSON_TABLE)
 		return nil
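As a cross-check on the field names the detector reads (is_from, where_clause, and the on_error DefElem), the same two checks can be written against pg_query_go's typed AST. This is a standalone sketch, not part of the patch; it assumes the pganalyze/pg_query_go/v5 module, so match the version pinned in go.mod.

```go
package main

import (
	"fmt"

	pg_query "github.com/pganalyze/pg_query_go/v5"
)

func main() {
	// One statement that contains both flagged constructs.
	sql := `COPY employees3 (id, name, age) FROM STDIN WITH (FORMAT csv, ON_ERROR IGNORE) WHERE age > 30`

	result, err := pg_query.Parse(sql)
	if err != nil {
		panic(err)
	}
	for _, raw := range result.Stmts {
		copyStmt := raw.Stmt.GetCopyStmt()
		if copyStmt == nil {
			continue // not a COPY statement
		}
		// COPY FROM ... WHERE: the parse tree carries is_from plus a where_clause node.
		if copyStmt.IsFrom && copyStmt.WhereClause != nil {
			fmt.Println("detected: COPY FROM ... WHERE")
		}
		// COPY ... ON_ERROR: options surface as DefElem nodes keyed by lower-cased option names.
		for _, opt := range copyStmt.Options {
			if def := opt.GetDefElem(); def != nil && def.Defname == "on_error" {
				fmt.Println("detected: COPY ... ON_ERROR")
			}
		}
	}
}
```

The detector above does the same thing generically via protoreflect, which is why it only needs the field names rather than the generated CopyStmt type.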
diff --git a/yb-voyager/src/query/queryissue/issues_dml.go b/yb-voyager/src/query/queryissue/issues_dml.go
index 43dc8ab07..e6915177c 100644
--- a/yb-voyager/src/query/queryissue/issues_dml.go
+++ b/yb-voyager/src/query/queryissue/issues_dml.go
@@ -141,3 +141,29 @@ func NewLOFuntionsIssue(objectType string, objectName string, sqlStatement strin
 	}
 	return newQueryIssue(loFunctionsIssue, objectType, objectName, sqlStatement, details)
 }
+
+var copyFromWhereIssue = issue.Issue{
+	Type:            COPY_FROM_WHERE,
+	TypeName:        "COPY FROM ... WHERE",
+	TypeDescription: "",
+	Suggestion:      "",
+	GH:              "",
+	DocsLink:        "",
+}
+
+func NewCopyFromWhereIssue(objectType string, objectName string, sqlStatement string) QueryIssue {
+	return newQueryIssue(copyFromWhereIssue, objectType, objectName, sqlStatement, map[string]interface{}{})
+}
+
+var copyOnErrorIssue = issue.Issue{
+	Type:            COPY_ON_ERROR,
+	TypeName:        "COPY ... ON_ERROR",
+	TypeDescription: "",
+	Suggestion:      "",
+	GH:              "",
+	DocsLink:        "",
+}
+
+func NewCopyOnErrorIssue(objectType string, objectName string, sqlStatement string) QueryIssue {
+	return newQueryIssue(copyOnErrorIssue, objectType, objectName, sqlStatement, map[string]interface{}{})
+}
diff --git a/yb-voyager/src/query/queryissue/issues_dml_test.go b/yb-voyager/src/query/queryissue/issues_dml_test.go
index 5b0cd0151..d24d4f194 100644
--- a/yb-voyager/src/query/queryissue/issues_dml_test.go
+++ b/yb-voyager/src/query/queryissue/issues_dml_test.go
@@ -58,6 +58,29 @@ func testRegexFunctionsIssue(t *testing.T) {
 	}
 }
 
+func testCopyOnErrorIssue(t *testing.T) {
+	ctx := context.Background()
+	conn, err := getConn()
+	assert.NoError(t, err)
+
+	defer conn.Close(context.Background())
+	// In case the COPY ... ON_ERROR construct gets supported in the future, this test will fail with a different error message, something related to the data.csv file not being found.
+	_, err = conn.Exec(ctx, `COPY pg_largeobject (loid, pageno, data) FROM '/path/to/data.csv' WITH (FORMAT csv, HEADER true, ON_ERROR IGNORE);`)
+	assertErrorCorrectlyThrownForIssueForYBVersion(t, err, "ERROR: option \"on_error\" not recognized (SQLSTATE 42601)", copyOnErrorIssue)
+
+}
+
+func testCopyFromWhereIssue(t *testing.T) {
+	ctx := context.Background()
+	conn, err := getConn()
+	assert.NoError(t, err)
+
+	defer conn.Close(context.Background())
+	// In case the COPY FROM ... WHERE construct gets supported in the future, this test will fail with a different error message, something related to the data.csv file not being found.
+	_, err = conn.Exec(ctx, `COPY pg_largeobject (loid, pageno, data) FROM '/path/to/data.csv' WHERE loid = 1 WITH (FORMAT csv, HEADER true);`)
+	assertErrorCorrectlyThrownForIssueForYBVersion(t, err, "ERROR: syntax error at or near \"WHERE\" (SQLSTATE 42601)", copyFromWhereIssue)
+}
+
 func testJsonConstructorFunctions(t *testing.T) {
 	ctx := context.Background()
 	conn, err := getConn()
@@ -145,6 +168,11 @@ func TestDMLIssuesInYBVersion(t *testing.T) {
 	success = t.Run(fmt.Sprintf("%s-%s", "regex functions", ybVersion), testRegexFunctionsIssue)
 	assert.True(t, success)
 
+	success = t.Run(fmt.Sprintf("%s-%s", "copy on error", ybVersion), testCopyOnErrorIssue)
+	assert.True(t, success)
+
+	success = t.Run(fmt.Sprintf("%s-%s", "copy from where", ybVersion), testCopyFromWhereIssue)
+	assert.True(t, success)
 	success = t.Run(fmt.Sprintf("%s-%s", "json constructor functions", ybVersion), testJsonConstructorFunctions)
 	assert.True(t, success)
 
diff --git a/yb-voyager/src/query/queryissue/parser_issue_detector.go b/yb-voyager/src/query/queryissue/parser_issue_detector.go
index 8b5f028b9..9c32fca53 100644
--- a/yb-voyager/src/query/queryissue/parser_issue_detector.go
+++ b/yb-voyager/src/query/queryissue/parser_issue_detector.go
@@ -375,6 +375,7 @@ func (p *ParserIssueDetector) genericIssues(query string) ([]QueryIssue, error)
 		NewColumnRefDetector(query),
 		NewXmlExprDetector(query),
 		NewRangeTableFuncDetector(query),
+		NewCopyCommandUnsupportedConstructsDetector(query),
 		NewJsonConstructorFuncDetector(query),
 		NewJsonQueryFunctionDetector(query),
 	}
@@ -414,7 +415,7 @@
 				xmlIssueAdded = true
 			}
 		}
-		result = append(result, issues...)
+		result = append(result, issue)
 	}
 }
 
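With the detector registered in genericIssues, any DML that reaches ParserIssueDetector is now screened for the two COPY constructs. Below is a test-style sketch of that end-to-end flow; it is not part of the patch, lives in package queryissue, and leans on the unexported getDMLIssues helper plus the testify and go-cmp imports the existing tests already use.

```go
func TestCopyFromWhereIsReportedSketch(t *testing.T) {
	stmt := `COPY employees3 (id, name, age) FROM STDIN WITH (FORMAT csv) WHERE age > 30;`

	issues, err := NewParserIssueDetector().getDMLIssues(stmt)
	assert.NoError(t, err)

	// The statement should be flagged exactly once, as a COPY FROM ... WHERE issue.
	expected := NewCopyFromWhereIssue("DML_QUERY", "", stmt)
	assert.Equal(t, 1, len(issues))
	assert.True(t, cmp.Equal(expected, issues[0]))
}
```

The table-driven test in the next file exercises the same path across positive, negative, and combined COPY statements.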
diff --git a/yb-voyager/src/query/queryissue/parser_issue_detector_test.go b/yb-voyager/src/query/queryissue/parser_issue_detector_test.go
index 1450f4edd..0d6e9d23f 100644
--- a/yb-voyager/src/query/queryissue/parser_issue_detector_test.go
+++ b/yb-voyager/src/query/queryissue/parser_issue_detector_test.go
@@ -649,3 +649,42 @@ func TestRegexFunctionsIssue(t *testing.T) {
 	}
 
 }
+
+func TestCopyUnsupportedConstructIssuesDetected(t *testing.T) {
+	expectedIssues := map[string][]QueryIssue{
+		`COPY my_table FROM '/path/to/data.csv' WHERE col1 > 100;`: {NewCopyFromWhereIssue("DML_QUERY", "", `COPY my_table FROM '/path/to/data.csv' WHERE col1 > 100;`)},
+		`COPY my_table(col1, col2) FROM '/path/to/data.csv' WHERE col2 = 'test';`: {NewCopyFromWhereIssue("DML_QUERY", "", `COPY my_table(col1, col2) FROM '/path/to/data.csv' WHERE col2 = 'test';`)},
+		`COPY my_table FROM '/path/to/data.csv' WHERE TRUE;`: {NewCopyFromWhereIssue("DML_QUERY", "", `COPY my_table FROM '/path/to/data.csv' WHERE TRUE;`)},
+		`COPY employees (id, name, age)
+		FROM STDIN WITH (FORMAT csv)
+		WHERE age > 30;`: {NewCopyFromWhereIssue("DML_QUERY", "", `COPY employees (id, name, age)
+		FROM STDIN WITH (FORMAT csv)
+		WHERE age > 30;`)},
+
+		`COPY table_name (name, age) FROM '/path/to/data.csv' WITH (FORMAT csv, HEADER true, ON_ERROR IGNORE);`: {NewCopyOnErrorIssue("DML_QUERY", "", `COPY table_name (name, age) FROM '/path/to/data.csv' WITH (FORMAT csv, HEADER true, ON_ERROR IGNORE);`)},
+		`COPY table_name (name, age) FROM '/path/to/data.csv' WITH (FORMAT csv, HEADER true, ON_ERROR STOP);`: {NewCopyOnErrorIssue("DML_QUERY", "", `COPY table_name (name, age) FROM '/path/to/data.csv' WITH (FORMAT csv, HEADER true, ON_ERROR STOP);`)},
+
+		`COPY table_name (name, age) FROM '/path/to/data.csv' WITH (FORMAT csv, HEADER true, ON_ERROR IGNORE) WHERE age > 18;`: {NewCopyFromWhereIssue("DML_QUERY", "", `COPY table_name (name, age) FROM '/path/to/data.csv' WITH (FORMAT csv, HEADER true, ON_ERROR IGNORE) WHERE age > 18;`), NewCopyOnErrorIssue("DML_QUERY", "", `COPY table_name (name, age) FROM '/path/to/data.csv' WITH (FORMAT csv, HEADER true, ON_ERROR IGNORE) WHERE age > 18;`)},
+		`COPY table_name (name, age) FROM '/path/to/data.csv' WITH (FORMAT csv, HEADER true, ON_ERROR STOP) WHERE name = 'Alice';`: {NewCopyFromWhereIssue("DML_QUERY", "", `COPY table_name (name, age) FROM '/path/to/data.csv' WITH (FORMAT csv, HEADER true, ON_ERROR STOP) WHERE name = 'Alice';`), NewCopyOnErrorIssue("DML_QUERY", "", `COPY table_name (name, age) FROM '/path/to/data.csv' WITH (FORMAT csv, HEADER true, ON_ERROR STOP) WHERE name = 'Alice';`)},
+
+		`COPY my_table FROM '/path/to/data.csv' WITH (FORMAT csv);`: {},
+		`COPY my_table FROM '/path/to/data.csv' WITH (FORMAT text);`: {},
+		`COPY my_table FROM '/path/to/data.csv';`: {},
+		`COPY my_table FROM '/path/to/data.csv' WITH (DELIMITER ',');`: {},
+		`COPY my_table(col1, col2) FROM '/path/to/data.csv' WITH (FORMAT csv, HEADER true);`: {},
+	}
+
+	parserIssueDetector := NewParserIssueDetector()
+
+	for stmt, expectedIssues := range expectedIssues {
+		issues, err := parserIssueDetector.getDMLIssues(stmt)
+		fatalIfError(t, err)
+		assert.Equal(t, len(expectedIssues), len(issues))
+		for _, expectedIssue := range expectedIssues {
+			found := slices.ContainsFunc(issues, func(queryIssue QueryIssue) bool {
+				return cmp.Equal(expectedIssue, queryIssue)
+			})
+			assert.True(t, found, "Expected issue not found: %v in statement: %s", expectedIssue, stmt)
+		}
+	}
+}
diff --git a/yb-voyager/src/query/queryparser/helpers_protomsg.go b/yb-voyager/src/query/queryparser/helpers_protomsg.go
index e0de00604..8fe62bc82 100644
--- a/yb-voyager/src/query/queryparser/helpers_protomsg.go
+++ b/yb-voyager/src/query/queryparser/helpers_protomsg.go
@@ -366,6 +366,14 @@ func GetMessageField(msg protoreflect.Message, fieldName string) protoreflect.Me
 	return nil
 }
 
+func GetBoolField(msg protoreflect.Message, fieldName string) bool {
+	field := msg.Descriptor().Fields().ByName(protoreflect.Name(fieldName))
+	if field != nil && msg.Has(field) {
+		return msg.Get(field).Bool()
+	}
+	return false
+}
+
 // GetListField retrieves a list field from a message.
 func GetListField(msg protoreflect.Message, fieldName string) protoreflect.List {
 	field := msg.Descriptor().Fields().ByName(protoreflect.Name(fieldName))
@@ -403,12 +411,10 @@ GetSchemaAndObjectName(nameList protoreflect.List) (string, string) {
 
 	Example: options:{def_elem:{defname:"security_invoker" arg:{string:{sval:"true"}} defaction:DEFELEM_UNSPEC location:32}}
 	options:{def_elem:{defname:"security_barrier" arg:{string:{sval:"false"}} defaction:DEFELEM_UNSPEC location:57}}
-
 	Extract all defnames from the def_eleme node
 */
 func TraverseAndExtractDefNamesFromDefElem(msg protoreflect.Message) ([]string, error) {
 	var defNames []string
-
 	collectorFunc := func(msg protoreflect.Message) error {
 		if GetMsgFullName(msg) != PG_QUERY_DEFELEM_NODE {
 			return nil
diff --git a/yb-voyager/src/query/queryparser/traversal_proto.go b/yb-voyager/src/query/queryparser/traversal_proto.go
index c988ee48e..e742a8172 100644
--- a/yb-voyager/src/query/queryparser/traversal_proto.go
+++ b/yb-voyager/src/query/queryparser/traversal_proto.go
@@ -49,6 +49,7 @@ const (
 	PG_QUERY_JSON_OBJECT_CONSTRUCTOR_NODE = "pg_query.JsonObjectConstructor"
 	PG_QUERY_JSON_TABLE_NODE              = "pg_query.JsonTable"
 	PG_QUERY_VIEWSTMT_NODE                = "pg_query.ViewStmt"
+	PG_QUERY_COPYSTSMT_NODE               = "pg_query.CopyStmt"
 )
 
 // function type for processing nodes during traversal
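For completeness, here is a quick way to see what the new protoreflect helpers return for a COPY statement. This is again a sketch outside the patch: the parser module version and the queryparser import path are assumptions, so adjust them to the repo's go.mod.

```go
package main

import (
	"fmt"

	pg_query "github.com/pganalyze/pg_query_go/v5"

	"github.com/yugabyte/yb-voyager/yb-voyager/src/query/queryparser"
)

func main() {
	result, err := pg_query.Parse(`COPY employees3 FROM STDIN WITH (FORMAT csv) WHERE age > 30`)
	if err != nil {
		panic(err)
	}

	// protoreflect view of the CopyStmt node, the same shape the traversal hands
	// to CopyCommandUnsupportedConstructsDetector.Detect.
	msg := result.Stmts[0].Stmt.GetCopyStmt().ProtoReflect()

	fmt.Println(msg.Descriptor().FullName())                             // pg_query.CopyStmt
	fmt.Println(queryparser.GetBoolField(msg, "is_from"))                // true
	fmt.Println(queryparser.GetMessageField(msg, "where_clause") != nil) // true, WHERE clause present
}
```

GetBoolField mirrors the existing GetMessageField/GetStringField helpers: it resolves the field by name on the message descriptor and returns the zero value when the field is absent, which is what lets Detect treat "no is_from" and "is_from = false" the same way.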