Reorg db/table sync suites (#219)
Some requirements for adding suites:
1. Each suite should be placed in its own directory, to avoid hitting the
   backup/restore job limit per database.
2. Naming idioms:
   1. Kind:
      1. cross_ds/ts: cross-feature related suites
      2. db/table_sync: db or table sync related suites
      3. table_sync_alias: table sync with alias related suites
      4. db/table_ps_sync: db or table partial sync related suites
      5. syncer: suites for the ccr syncer itself
   2. Resource: column, view, table, ...
   3. Operation: add, alter, drop, rename, ...
3. Suites are organized in a Kind/Resource/Operation directory hierarchy, where
   the Resource level is optional; see the example layout below.
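
   For example, a hypothetical layout following this convention might look like:

       table_sync/column/add/    -- table-sync suites that add a column
       db_sync/                  -- db-sync suites (no Resource level needed)
       syncer/                   -- suites for the ccr syncer itself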
w41ter authored Nov 6, 2024
1 parent a4d7b01 commit a134ed7
Showing 66 changed files with 443 additions and 276 deletions.
51 changes: 42 additions & 9 deletions regression-test/common/helper.groovy
@@ -15,6 +15,10 @@
// specific language governing permissions and limitations
// under the License.

import com.google.common.collect.Maps

import java.util.Map

class Helper {
def suite
def context
@@ -31,11 +35,40 @@ class Helper {
}

String randomSuffix() {
return UUID.randomUUID().toString().replace("-", "")
def hashCode = UUID.randomUUID().toString().replace("-", "").hashCode()
if (hashCode < 0) {
hashCode *= -1;
}
return Integer.toString(hashCode)
}

def get_ccr_body(String table, String db = null) {
if (db == null) {
db = context.dbName
}

def gson = new com.google.gson.Gson()

Map<String, String> srcSpec = context.getSrcSpec(db)
srcSpec.put("table", table)

Map<String, String> destSpec = context.getDestSpec(db)
destSpec.put("table", table)

Map<String, Object> body = Maps.newHashMap()
String name = context.suiteName
if (!table.equals("")) {
name = name + "_" + table
}
body.put("name", name)
body.put("src", srcSpec)
body.put("dest", destSpec)

return gson.toJson(body)
}

void ccrJobDelete(table = "") {
def bodyJson = suite.get_ccr_body "${table}"
def bodyJson = get_ccr_body "${table}"
suite.httpTest {
uri "/delete"
endpoint syncerAddress
@@ -45,7 +78,7 @@
}

void ccrJobCreate(table = "") {
def bodyJson = suite.get_ccr_body "${table}"
def bodyJson = get_ccr_body "${table}"
suite.httpTest {
uri "/create_ccr"
endpoint syncerAddress
@@ -55,7 +88,7 @@
}

void ccrJobCreateAllowTableExists(table = "") {
def bodyJson = suite.get_ccr_body "${table}"
def bodyJson = get_ccr_body "${table}"
def jsonSlurper = new groovy.json.JsonSlurper()
def object = jsonSlurper.parseText "${bodyJson}"
object['allow_table_exists'] = true
@@ -71,7 +104,7 @@
}

void ccrJobPause(table = "") {
def bodyJson = suite.get_ccr_body "${table}"
def bodyJson = get_ccr_body "${table}"
suite.httpTest {
uri "/pause"
endpoint syncerAddress
@@ -81,7 +114,7 @@
}

void ccrJobResume(table = "") {
def bodyJson = suite.get_ccr_body "${table}"
def bodyJson = get_ccr_body "${table}"
suite.httpTest {
uri "/resume"
endpoint syncerAddress
@@ -91,7 +124,7 @@
}

void ccrJobDesync(table = "") {
def bodyJson = suite.get_ccr_body "${table}"
def bodyJson = get_ccr_body "${table}"
suite.httpTest {
uri "/desync"
endpoint syncerAddress
@@ -221,7 +254,7 @@
}

void force_fullsync(tableName = "") {
def bodyJson = suite.get_ccr_body "${tableName}"
def bodyJson = get_ccr_body "${tableName}"
suite.httpTest {
uri "/force_fullsync"
endpoint syncerAddress
@@ -231,7 +264,7 @@
}

Object get_job_progress(tableName = "") {
def request_body = suite.get_ccr_body(tableName)
def request_body = get_ccr_body(tableName)
def get_job_progress_uri = { check_func ->
suite.httpTest {
uri "/job_progress"
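
For reference, a minimal sketch of how a suite consumes the refactored helper (table creation and data loading elided; the calls mirror the suites below, and the table name is hypothetical):

    def helper = new GroovyShell(new Binding(['suite': delegate]))
            .evaluate(new File("${context.config.suitePath}/../common", "helper.groovy"))

    def tableName = "tbl_" + helper.randomSuffix()   // short positive-integer suffix instead of a full UUID
    helper.enableDbBinlog()
    helper.ccrJobCreate()                            // request body is built internally via get_ccr_body()
    assertTrue(helper.checkRestoreFinishTimesOf("${tableName}", 30))
    def progress = helper.get_job_progress()         // parsed job_progress, e.g. progress.full_sync_start_at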
1 change: 0 additions & 1 deletion regression-test/data/ccr_user_sync/test_common_sync.out

This file was deleted.

File renamed without changes.
@@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.

suite("test_db_sync_fullsync_with_alias") {
suite("test_cds_fullsync_with_alias") {
def helper = new GroovyShell(new Binding(['suite': delegate]))
.evaluate(new File("${context.config.suitePath}/../common", "helper.groovy"))

@@ -15,11 +15,11 @@
// specific language governing permissions and limitations
// under the License.

suite("test_db_sync_signature_not_matched") {
suite("test_cds_signature_not_matched") {
def helper = new GroovyShell(new Binding(['suite': delegate]))
.evaluate(new File("${context.config.suitePath}/../common", "helper.groovy"))

def tableName = "tbl_db_sync_sig_not_matched_" + helper.randomSuffix()
def tableName = "tbl_" + helper.randomSuffix()
def test_num = 0
def insert_num = 20
def opPartitonName = "less"
@@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.

suite("test_sync_view_twice") {
suite("test_cds_sync_view_twice") {
def versions = sql_return_maparray "show variables like 'version_comment'"
if (versions[0].Value.contains('doris-2.0.') || versions[0].Value.contains('doris-2.1.')) {
logger.info("2.0/2.1 not support this case, current version is: ${versions[0].Value}")
@@ -96,3 +96,4 @@ suite("test_sync_view_twice") {
def view_size = target_sql "SHOW VIEW FROM ${tableDuplicate0}"
assertTrue(view_size.size() == 1);
}

@@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.

suite("test_keyword_name") {
suite("test_cts_keyword_name") {
def helper = new GroovyShell(new Binding(['suite': delegate]))
.evaluate(new File("${context.config.suitePath}/../common", "helper.groovy"))

@@ -23,8 +23,8 @@ suite("test_db_partial_sync_inc_add_partition") {
return
}

def tableName = "tbl_" + UUID.randomUUID().toString().replace("-", "")
def tableName1 = "tbl_" + UUID.randomUUID().toString().replace("-", "")
def tableName = "tbl_" + helper.randomSuffix()
def tableName1 = "tbl_" + helper.randomSuffix()
def test_num = 0
def insert_num = 5

@@ -23,8 +23,8 @@ suite("test_db_partial_sync_inc_alter") {
return
}

def tableName = "tbl_" + UUID.randomUUID().toString().replace("-", "")
def tableName1 = "tbl_" + UUID.randomUUID().toString().replace("-", "")
def tableName = "tbl_" + helper.randomSuffix()
def tableName1 = "tbl_" + helper.randomSuffix()
def test_num = 0
def insert_num = 5

@@ -18,7 +18,7 @@ suite("test_db_partial_sync_cache") {
def helper = new GroovyShell(new Binding(['suite': delegate]))
.evaluate(new File("${context.config.suitePath}/../common", "helper.groovy"))

def tableName = "tbl_partial_sync_cache_" + UUID.randomUUID().toString().replace("-", "")
def tableName = "tbl_" + helper.randomSuffix()
def test_num = 0
def insert_num = 5

@@ -38,34 +38,6 @@
return object.name
}

def get_job_progress = { ccr_name ->
def request_body = """ {"name":"${ccr_name}"} """
def get_job_progress_uri = { check_func ->
httpTest {
uri "/job_progress"
endpoint helper.syncerAddress
body request_body
op "post"
check check_func
}
}

def result = null
get_job_progress_uri.call() { code, body ->
if (!"${code}".toString().equals("200")) {
throw "request failed, code: ${code}, body: ${body}"
}
def jsonSlurper = new groovy.json.JsonSlurper()
def object = jsonSlurper.parseText "${body}"
if (!object.success) {
throw "request failed, error msg: ${object.error_msg}"
}
logger.info("job progress: ${object.job_progress}")
result = jsonSlurper.parseText object.job_progress
}
return result
}

helper.enableDbBinlog()
sql "DROP TABLE IF EXISTS ${tableName}"
sql """
@@ -93,15 +65,12 @@
"""
sql "sync"

def bodyJson = get_ccr_body ""
ccr_name = get_ccr_name(bodyJson)
helper.ccrJobCreate()
logger.info("ccr job name: ${ccr_name}")

assertTrue(helper.checkRestoreFinishTimesOf("${tableName}", 30))
assertTrue(helper.checkSelectTimesOf("SELECT * FROM ${tableName}", insert_num, 60))

first_job_progress = get_job_progress(ccr_name)
first_job_progress = helper.get_job_progress()

logger.info("=== Test 1: add first column case ===")
// binlog type: ALTER_JOB, binlog data:
@@ -140,7 +109,7 @@
assertTrue(helper.checkSelectTimesOf("SELECT * FROM ${tableName}", insert_num + 1, 60))

// no full sync triggered.
last_job_progress = get_job_progress(ccr_name)
last_job_progress = helper.get_job_progress()
assertTrue(last_job_progress.full_sync_start_at == first_job_progress.full_sync_start_at)
}

@@ -23,8 +23,8 @@ suite("test_db_partial_sync_inc_drop_partition") {
return
}

def tableName = "tbl_" + UUID.randomUUID().toString().replace("-", "")
def tableName1 = "tbl_" + UUID.randomUUID().toString().replace("-", "")
def tableName = "tbl_" + helper.randomSuffix()
def tableName1 = "tbl_" + helper.randomSuffix()
def test_num = 0
def insert_num = 5

@@ -23,8 +23,8 @@ suite("test_db_partial_sync_inc_lightning_sc") {
return
}

def tableName = "tbl_" + UUID.randomUUID().toString().replace("-", "")
def tableName1 = "tbl_" + UUID.randomUUID().toString().replace("-", "")
def tableName = "tbl_" + helper.randomSuffix()
def tableName1 = "tbl_" + helper.randomSuffix()
def test_num = 0
def insert_num = 5

@@ -23,8 +23,8 @@ suite("test_db_partial_sync_inc_merge") {
return
}

def tableName = "tbl_" + UUID.randomUUID().toString().replace("-", "")
def tableName1 = "tbl_" + UUID.randomUUID().toString().replace("-", "")
def tableName = "tbl_" + helper.randomSuffix()
def tableName1 = "tbl_" + helper.randomSuffix()
def test_num = 0
def insert_num = 5

@@ -23,8 +23,8 @@ suite("test_db_partial_sync_inc_replace_partition") {
return
}

def tableName = "tbl_" + UUID.randomUUID().toString().replace("-", "")
def tableName1 = "tbl_" + UUID.randomUUID().toString().replace("-", "")
def tableName = "tbl_" + helper.randomSuffix()
def tableName1 = "tbl_" + helper.randomSuffix()
def test_num = 0
def insert_num = 5

@@ -23,8 +23,8 @@ suite("test_db_partial_sync_inc_trunc_table") {
return
}

def tableName = "tbl_" + UUID.randomUUID().toString().replace("-", "")
def tableName1 = "tbl_" + UUID.randomUUID().toString().replace("-", "")
def tableName = "tbl_" + helper.randomSuffix()
def tableName1 = "tbl_" + helper.randomSuffix()
def test_num = 0
def insert_num = 5

@@ -23,8 +23,8 @@ suite("test_db_partial_sync_inc_upsert") {
return
}

def tableName = "tbl_sync_incremental_" + UUID.randomUUID().toString().replace("-", "")
def tableName1 = "tbl_sync_incremental_1_" + UUID.randomUUID().toString().replace("-", "")
def tableName = "tbl_" + helper.randomSuffix()
def tableName1 = "tbl_" + helper.randomSuffix()
def test_num = 0
def insert_num = 5

(remaining changed files not loaded)
