SNOW-1728000 Iceberg e2e tests (#1002)
1 parent 896c345 · commit d969e97
Showing 9 changed files with 245 additions and 29 deletions.
@@ -0,0 +1,28 @@
{
  "name": "SNOWFLAKE_CONNECTOR_NAME",
  "config": {
    "connector.class": "com.snowflake.kafka.connector.SnowflakeSinkConnector",
    "topics": "SNOWFLAKE_TEST_TOPIC",
    "tasks.max": "1",
    "buffer.size.bytes": "5000000",
    "snowflake.url.name": "SNOWFLAKE_HOST",
    "snowflake.user.name": "SNOWFLAKE_USER",
    "snowflake.private.key": "SNOWFLAKE_PRIVATE_KEY",
    "snowflake.database.name": "SNOWFLAKE_DATABASE",
    "snowflake.schema.name": "SNOWFLAKE_SCHEMA",
    "snowflake.role.name": "SNOWFLAKE_ROLE",
    "snowflake.ingestion.method": "SNOWPIPE_STREAMING",
    "key.converter": "org.apache.kafka.connect.storage.StringConverter",
    "value.converter": "io.confluent.connect.avro.AvroConverter",
    "value.converter.schema.registry.url": "CONFLUENT_SCHEMA_REGISTRY",
    "value.converter.schemas.enable": "false",
    "jmx": "true",
    "errors.tolerance": "all",
    "errors.log.enable": true,
    "errors.deadletterqueue.topic.name": "DLQ_TOPIC",
    "errors.deadletterqueue.topic.replication.factor": 1,
    "snowflake.streaming.iceberg.enabled": true,
    "snowflake.streaming.max.client.lag": "1",
    "snowflake.streaming.enable.single.buffer": "true"
  }
}
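The UPPER_CASE values above are placeholders that the e2e harness substitutes before deployment. Purely as an illustration (not part of this commit), a rendered config of this shape could be submitted to a Kafka Connect worker over its REST API; the worker URL and file name below are assumptions:

# Illustration only: deploy a rendered connector config via the Kafka Connect REST API.
import json
import requests  # assumed to be available in the test environment

CONNECT_URL = "http://localhost:8083"           # assumption: local Connect worker
CONFIG_FILE = "rendered_connector_config.json"  # hypothetical rendered copy of the file above

with open(CONFIG_FILE) as f:
    payload = json.load(f)  # {"name": ..., "config": {...}} as shown above

# POST /connectors creates the connector; a 409 means one with that name already exists.
response = requests.post(
    "{}/connectors".format(CONNECT_URL),
    headers={"Content-Type": "application/json"},
    data=json.dumps(payload),
)
response.raise_for_status()
print("Created connector:", response.json()["name"])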
@@ -0,0 +1,38 @@
from test_suit.test_utils import NonRetryableError


def assert_equals(expected, actual):
    if expected != actual:
        raise NonRetryableError(
            "Actual {} does not equal expected {}".format(actual, expected)
        )


def assert_equals_with_precision(expected, actual, precision=0.1):
    if not expected - precision < actual < expected + precision:
        raise NonRetryableError(
            "Actual {} does not equal expected {} with precision {}".format(
                actual, expected, precision
            )
        )


def assert_starts_with(expected_prefix, actual):
    if not actual.startswith(expected_prefix):
        raise NonRetryableError(
            "Actual {} does not start with {}".format(actual, expected_prefix)
        )


def assert_not_null(actual):
    if actual is None:
        raise NonRetryableError("Actual {} is null".format(actual))


def assert_dict_contains(expected_key, expected_value, actual_dict):
    if actual_dict[expected_key] != expected_value:
        raise NonRetryableError(
            "Actual value from dict {} does not equal expected {}".format(
                actual_dict[expected_key], expected_value
            )
        )
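Each helper raises NonRetryableError, so a failed check aborts the test round instead of being retried. A minimal usage sketch, with a hand-built record purely for illustration:

# Illustration only: the record below is made up to show how the helpers compose.
from test_suit.assertions import (
    assert_dict_contains,
    assert_equals,
    assert_equals_with_precision,
    assert_not_null,
    assert_starts_with,
)

record = {
    "id": 1,
    "body_temperature": 36.58,
    "topic": "iceberg_avro_aws_abc123",
    "headers": {"header1": "value1"},
}

assert_equals(1, record["id"])                                  # exact comparison
assert_equals_with_precision(36.6, record["body_temperature"])  # within the default 0.1
assert_starts_with("iceberg_", record["topic"])                 # prefix check
assert_not_null(record["headers"])                              # None check
assert_dict_contains("header1", "value1", record["headers"])    # key/value lookup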
@@ -0,0 +1,38 @@
from test_suit.base_e2e import BaseE2eTest
from test_suit.assertions import *


class BaseIcebergTest(BaseE2eTest):

    def __init__(self, driver, nameSalt):
        self.driver = driver
        self.test_message = {
            "id": 1,
            "body_temperature": 36.6,
            "name": "Steve",
            "approved_coffee_types": ["Espresso", "Doppio", "Ristretto", "Lungo"],
            "animals_possessed": {"dogs": True, "cats": False},
        }
        self.test_headers = [("header1", "value1")]

    def verify_iceberg_content(self, content: dict):
        assert_equals(1, content['id'])
        assert_equals_with_precision(36.6, content['body_temperature'])
        assert_equals('Steve', content['name'])

        assert_equals('Espresso', content['approved_coffee_types'][0])
        assert_equals('Doppio', content['approved_coffee_types'][1])
        assert_equals('Ristretto', content['approved_coffee_types'][2])
        assert_equals('Lungo', content['approved_coffee_types'][3])

        assert_equals(True, content['animals_possessed']['dogs'])
        assert_equals(False, content['animals_possessed']['cats'])

    def verify_iceberg_metadata(self, metadata: dict):
        assert_equals(0, metadata['offset'])
        assert_equals(0, metadata['partition'])
        assert_starts_with('iceberg_', metadata['topic'])
        assert_not_null(metadata['SnowflakeConnectorPushTime'])

        assert_dict_contains('header1', 'value1', metadata['headers'])
@@ -0,0 +1,90 @@
from test_suit.test_utils import RetryableError, NonRetryableError
import json
from confluent_kafka import avro
from test_suit.base_iceberg_test import BaseIcebergTest


class TestIcebergAvroAws(BaseIcebergTest):
    def __init__(self, driver, nameSalt: str):
        BaseIcebergTest.__init__(self, driver, nameSalt)
        self.fileName = "iceberg_avro_aws"
        self.topic = self.fileName + nameSalt

        valueSchemaStr = """
        {
            "type":"record",
            "name":"value_schema",
            "fields": [
                {
                    "name": "id",
                    "type": "int"
                },
                {
                    "name": "body_temperature",
                    "type": "float"
                },
                {
                    "name": "name",
                    "type": "string"
                },
                {
                    "name": "approved_coffee_types",
                    "type": {
                        "type": "array",
                        "items": "string"
                    }
                },
                {
                    "name": "animals_possessed",
                    "type": {
                        "type": "map",
                        "values": "boolean"
                    }
                }
            ]
        }
        """
        self.valueSchema = avro.loads(valueSchemaStr)

    def getConfigFileName(self):
        return self.fileName + ".json"

    def setup(self):
        self.driver.create_iceberg_table_with_content(
            table_name=self.topic,
            external_volume="kafka_push_e2e_volume_aws",  # volume created manually
        )

    def send(self):
        value = []

        for e in range(100):
            value.append(self.test_message)

        self.driver.sendAvroSRData(
            topic=self.topic,
            value=value,
            value_schema=self.valueSchema,
            headers=self.test_headers,
        )

    def verify(self, round):
        number_of_records = self.driver.select_number_of_records(self.topic)
        if number_of_records == 0:
            raise RetryableError()
        elif number_of_records != 100:
            raise NonRetryableError(
                "Number of records in table is different from number of records sent"
            )

        first_record = (
            self.driver.snowflake_conn.cursor()
            .execute("Select * from {} limit 1".format(self.topic))
            .fetchone()
        )

        self.verify_iceberg_content(json.loads(first_record[0]))
        self.verify_iceberg_metadata(json.loads(first_record[1]))

    def clean(self):
        self.driver.drop_iceberg_table(self.topic)
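setup() delegates table creation to the test driver, whose implementation is not part of this diff. Purely as a hypothetical sketch of what create_iceberg_table_with_content might issue, assuming a Snowflake-managed Iceberg table whose RECORD_CONTENT column mirrors the Avro schema above (the actual DDL lives in the driver, and every name and option here is an assumption):

# Hypothetical sketch only; the real helper is defined in the e2e test driver,
# not in this commit. The column layout and DDL options are assumptions.
def create_iceberg_table_with_content(self, table_name, external_volume):
    ddl = """
    CREATE OR REPLACE ICEBERG TABLE {table} (
        record_content OBJECT(
            id INT,
            body_temperature FLOAT,
            name STRING,
            approved_coffee_types ARRAY(STRING),
            animals_possessed MAP(STRING, BOOLEAN)
        )
    )
    EXTERNAL_VOLUME = '{volume}'
    CATALOG = 'SNOWFLAKE'
    BASE_LOCATION = '{table}'
    """.format(table=table_name, volume=external_volume)
    self.snowflake_conn.cursor().execute(ddl)

Presumably the connector adds its metadata column once snowflake.streaming.iceberg.enabled is set, which would explain why verify() reads content and metadata back as the first and second columns of the row.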