From a6e4300d249a312a9c3f594f9e2688fef483526a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cv=5Fkkhuang=E2=80=9D?= <“420895376@qq.com”> Date: Thu, 19 Sep 2024 16:38:24 +0800 Subject: [PATCH] Fix ddl dml --- linkis-dist/package/db/linkis_ddl.sql | 392 ++++++++++++++---- linkis-dist/package/db/linkis_dml.sql | 146 +++++-- .../upgrade/1.6.0_schema/mysql/linkis_ddl.sql | 81 +++- .../upgrade/1.6.0_schema/mysql/linkis_dml.sql | 87 ++++ 4 files changed, 580 insertions(+), 126 deletions(-) diff --git a/linkis-dist/package/db/linkis_ddl.sql b/linkis-dist/package/db/linkis_ddl.sql index 6cb3c839e5..92835c9e09 100644 --- a/linkis-dist/package/db/linkis_ddl.sql +++ b/linkis-dist/package/db/linkis_ddl.sql @@ -26,6 +26,13 @@ -- 组合索引建议包含所有字段名,过长的字段名可以采用缩写形式。例如idx_age_name_add -- 索引名尽量不超过50个字符,命名应该使用小写 + +-- Notes +-- 1. TDSQL enforces a hard limit: for varchar indexes, the total indexed length must not exceed 768 bytes. Size the columns of a composite index according to the actual data values, e.g. define an ID-card number as varchar(20) instead of varchar(100); +-- also, since TDSQL uses the UTF8 character set by default (3 bytes per character), the total length of the indexed columns must be less than 768/3 = 256 characters. +-- 2. When AOMP executes SQL, CREATE TABLE statements may use backquotes, but ALTER statements must not use backquotes +-- 3. When adding or modifying columns with ALTER, always specify the character set and collation CHARSET utf8mb4 COLLATE utf8mb4_bin + SET FOREIGN_KEY_CHECKS=0; DROP TABLE IF EXISTS `linkis_ps_configuration_config_key`; @@ -42,14 +49,14 @@ CREATE TABLE `linkis_ps_configuration_config_key`( `is_advanced` tinyint(1) DEFAULT NULL COMMENT 'Whether it is an advanced parameter. If set to 1(true), parameters would be displayed only when user choose to do so', `level` tinyint(1) DEFAULT NULL COMMENT 'Basis for displaying sorting in the front-end. Higher the level is, higher the rank the parameter gets', `treeName` varchar(20) DEFAULT NULL COMMENT 'Reserved field, representing the subdirectory of engineType', - `boundary_type` int(2) NOT NULL DEFAULT '0' COMMENT '0 none/ 1 with mix /2 with max / 3 min and max both', + `boundary_type` tinyint(2) NOT NULL DEFAULT '0' COMMENT '0 none/ 1 with mix /2 with max / 3 min and max both', `en_description` varchar(200) DEFAULT NULL COMMENT 'english description', `en_name` varchar(100) DEFAULT NULL COMMENT 'english name', `en_treeName` varchar(100) DEFAULT NULL COMMENT 'english treeName', `template_required` tinyint(1) DEFAULT 0 COMMENT 'template required 0 none / 1 must', - UNIQUE KEY `uniq_key_ectype` (`key`,`engine_conn_type`), + UNIQUE INDEX `uniq_key_ectype` (`key`,`engine_conn_type`), PRIMARY KEY (`id`) -)ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_ps_configuration_key_engine_relation`; @@ -59,7 +66,7 @@ CREATE TABLE `linkis_ps_configuration_key_engine_relation`( `engine_type_label_id` bigint(20) NOT NULL COMMENT 'engine label id', PRIMARY KEY (`id`), UNIQUE INDEX `uniq_kid_lid` (`config_key_id`, `engine_type_label_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_ps_configuration_config_value`; @@ -72,7 +79,7 @@ CREATE TABLE `linkis_ps_configuration_config_value`( `config_value` varchar(200) NOT NULL, `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE INDEX `uniq_kid_lid` (`config_key_id`, `config_label_id`) -)ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +)ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_ps_configuration_category`; CREATE TABLE `linkis_ps_configuration_category` ( `id` int(20) NOT NULL AUTO_INCREMENT, `label_id` int(20) NOT NULL, `level` int(20) NOT NULL, `description` varchar(200), `tag` varchar(200), `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE INDEX 
`uniq_label_id` (`label_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_ps_configuration_template_config_key`; CREATE TABLE IF NOT EXISTS `linkis_ps_configuration_template_config_key` ( @@ -171,7 +178,7 @@ CREATE TABLE `linkis_ps_job_history_group_history` ( PRIMARY KEY (`id`), KEY `idx_created_time` (`created_time`), KEY `idx_submit_user` (`submit_user`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_ps_job_history_detail`; @@ -187,19 +194,19 @@ CREATE TABLE `linkis_ps_job_history_detail` ( `status` varchar(32) DEFAULT NULL COMMENT 'status', `priority` int(4) DEFAULT 0 COMMENT 'order of subjob', PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_ps_common_lock`; CREATE TABLE `linkis_ps_common_lock` ( `id` int(11) NOT NULL AUTO_INCREMENT, `lock_object` varchar(255) COLLATE utf8_bin DEFAULT NULL, - `locker` varchar(255) COLLATE utf8_bin NOT NULL, + `locker` VARCHAR(255) CHARSET utf8mb4 COLLATE utf8mb4_bin DEFAULT NULL COMMENT 'locker', `time_out` longtext COLLATE utf8_bin, `update_time` datetime DEFAULT CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE KEY `uniq_lock_object` (`lock_object`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; @@ -215,7 +222,7 @@ CREATE TABLE `linkis_ps_udf_manager` ( `update_time` datetime DEFAULT CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- ---------------------------- @@ -230,7 +237,7 @@ CREATE TABLE `linkis_ps_udf_shared_group` ( `update_time` datetime DEFAULT CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_ps_udf_shared_info`; CREATE TABLE `linkis_ps_udf_shared_info` @@ -240,7 +247,7 @@ CREATE TABLE `linkis_ps_udf_shared_info` `user_name` varchar(50) NOT NULL, `update_time` datetime DEFAULT CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP -) ENGINE=InnoDB DEFAULT CHARSET=utf8; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- ---------------------------- -- Table structure for linkis_ps_udf_tree @@ -249,7 +256,7 @@ DROP TABLE IF EXISTS `linkis_ps_udf_tree`; CREATE TABLE `linkis_ps_udf_tree` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `parent` bigint(20) NOT NULL, - `name` varchar(100) DEFAULT NULL COMMENT 'Category name of the function. It would be displayed in the front-end', + `name` varchar(50) DEFAULT NULL COMMENT 'Category name of the function. 
It would be displayed in the front-end', `user_name` varchar(50) NOT NULL, `description` varchar(255) DEFAULT NULL, `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, @@ -257,7 +264,7 @@ CREATE TABLE `linkis_ps_udf_tree` ( `category` varchar(50) DEFAULT NULL COMMENT 'Used to distinguish between udf and function', PRIMARY KEY (`id`), UNIQUE KEY `uniq_parent_name_uname_category` (`parent`,`name`,`user_name`,`category`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- ---------------------------- @@ -273,7 +280,7 @@ CREATE TABLE `linkis_ps_udf_user_load` ( `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE KEY `uniq_uid_uname` (`udf_id`, `user_name`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_ps_udf_baseinfo`; CREATE TABLE `linkis_ps_udf_baseinfo` ( @@ -289,7 +296,7 @@ CREATE TABLE `linkis_ps_udf_baseinfo` ( `is_expire` bit(1) DEFAULT NULL, `is_shared` bit(1) DEFAULT NULL, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- bdp_easy_ide.linkis_ps_udf_version definition DROP TABLE IF EXISTS `linkis_ps_udf_version`; @@ -307,7 +314,7 @@ CREATE TABLE `linkis_ps_udf_version` ( `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `md5` varchar(100) DEFAULT NULL, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; SET FOREIGN_KEY_CHECKS=0; @@ -325,7 +332,7 @@ CREATE TABLE `linkis_ps_variable_key_user` ( UNIQUE KEY `uniq_aid_kid_uname` (`application_id`,`key_id`,`user_name`), KEY `idx_key_id` (`key_id`), KEY `idx_aid` (`application_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- ---------------------------- @@ -343,7 +350,7 @@ CREATE TABLE `linkis_ps_variable_key` ( `value_regex` varchar(100) DEFAULT NULL COMMENT 'Reserved word', PRIMARY KEY (`id`), KEY `idx_aid` (`application_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- ---------------------------- -- Table structure for linkis_ps_datasource_access @@ -357,7 +364,7 @@ CREATE TABLE `linkis_ps_datasource_access` ( `application_id` int(4) NOT NULL, `access_time` datetime NOT NULL, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- ---------------------------- -- Table structure for linkis_ps_datasource_field @@ -377,7 +384,7 @@ CREATE TABLE `linkis_ps_datasource_field` ( `length` int(11) DEFAULT NULL, `mode_info` varchar(128) COLLATE utf8_bin DEFAULT NULL, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- ---------------------------- -- Table structure for linkis_ps_datasource_import @@ -389,7 +396,7 @@ CREATE TABLE `linkis_ps_datasource_import` ( `import_type` int(4) NOT NULL, `args` varchar(255) COLLATE utf8_bin NOT NULL, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- ---------------------------- -- Table structure for linkis_ps_datasource_lineage @@ -401,7 +408,7 @@ CREATE TABLE `linkis_ps_datasource_lineage` ( `source_table` varchar(64) COLLATE utf8_bin DEFAULT NULL, `update_time` datetime DEFAULT NULL, 
PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- ---------------------------- -- Table structure for linkis_ps_datasource_table -- ---------------------------- @@ -427,7 +434,7 @@ CREATE TABLE `linkis_ps_datasource_table` ( `is_available` tinyint(1) NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `uniq_db_name` (`database`,`name`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- ---------------------------- -- Table structure for linkis_ps_datasource_table_info -- ---------------------------- @@ -444,7 +451,7 @@ CREATE TABLE `linkis_ps_datasource_table_info` ( `update_time` datetime NOT NULL, `field_num` int(11) NOT NULL, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; @@ -468,7 +475,7 @@ CREATE TABLE `linkis_ps_cs_context_map` ( PRIMARY KEY (`id`), UNIQUE KEY `uniq_key_cid_ctype` (`key`,`context_id`,`context_type`), KEY `idx_keywords` (`keywords`(191)) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- ---------------------------- -- Table structure for linkis_ps_cs_context_map_listener -- ---------------------------- @@ -482,7 +489,7 @@ CREATE TABLE `linkis_ps_cs_context_map_listener` ( `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', `access_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'last access time', PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- ---------------------------- -- Table structure for linkis_ps_cs_context_history -- ---------------------------- @@ -500,7 +507,7 @@ CREATE TABLE `linkis_ps_cs_context_history` ( `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', `access_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'last access time', KEY `idx_keyword` (`keyword`(191)) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- ---------------------------- -- Table structure for linkis_ps_cs_context_id -- ---------------------------- @@ -513,8 +520,8 @@ CREATE TABLE `linkis_ps_cs_context_id` ( `source` varchar(255) DEFAULT NULL, `expire_type` varchar(32) DEFAULT NULL, `expire_time` datetime DEFAULT NULL, - `instance` varchar(128) DEFAULT NULL, - `backup_instance` varchar(255) DEFAULT NULL, + `instance` varchar(64) DEFAULT NULL, + `backup_instance` varchar(64) DEFAULT NULL, `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp', `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', `access_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'last access time', @@ -522,7 +529,7 @@ - KEY `idx_instance` (`instance`(128)), - KEY `idx_backup_instance` (`backup_instance`(191)), - KEY `idx_instance_bin` (`instance`(128),`backup_instance`(128)) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + KEY `idx_instance` (`instance`), + KEY `idx_backup_instance` (`backup_instance`), + KEY `idx_instance_bin` (`instance`,`backup_instance`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- ---------------------------- -- Table structure for linkis_ps_cs_context_listener -- ---------------------------- @@ -536,7 +543,7 @@ CREATE TABLE `linkis_ps_cs_context_listener` ( `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', `access_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'last access time', PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_ps_bml_resources`; @@ 
-558,7 +565,7 @@ CREATE TABLE if not exists `linkis_ps_bml_resources` ( `enable_flag` tinyint(1) NOT NULL DEFAULT '1' COMMENT 'Status, 1: normal, 0: frozen', unique key `uniq_rid_eflag`(`resource_id`, `enable_flag`), PRIMARY KEY (`id`) -) ENGINE=InnoDB AUTO_INCREMENT=9 DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB AUTO_INCREMENT=9 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_ps_bml_resources_version`; @@ -579,7 +586,7 @@ CREATE TABLE if not exists `linkis_ps_bml_resources_version` ( `enable_flag` tinyint(1) NOT NULL DEFAULT '1' COMMENT 'Status, 1: normal, 0: frozen', unique key `uniq_rid_version`(`resource_id`, `version`), PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; @@ -593,7 +600,7 @@ CREATE TABLE if not exists `linkis_ps_bml_resources_permission` ( `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'updated time', `updator` varchar(50) NOT NULL COMMENT 'updator', PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; @@ -608,7 +615,7 @@ CREATE TABLE if not exists `linkis_ps_resources_download_history` ( `version` varchar(20) not null, `downloader` varchar(50) NOT NULL COMMENT 'Downloader', PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; @@ -632,7 +639,7 @@ CREATE TABLE if not exists `linkis_ps_bml_resources_task` ( `last_update_time` datetime NOT NULL COMMENT 'Last update time', unique key `uniq_rid_version` (resource_id, version), PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; @@ -648,7 +655,7 @@ create table if not exists linkis_ps_bml_project( `create_time` datetime DEFAULT now(), unique key `uniq_name` (`name`), PRIMARY KEY (`id`) -)ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin ROW_FORMAT=COMPACT; +)ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin ROW_FORMAT=COMPACT; @@ -663,7 +670,7 @@ create table if not exists linkis_ps_bml_project_user( `expire_time` datetime default null, unique key `uniq_name_pid`(`username`, `project_id`), PRIMARY KEY (`id`) -)ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin ROW_FORMAT=COMPACT; +)ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin ROW_FORMAT=COMPACT; DROP TABLE IF EXISTS `linkis_ps_bml_project_resource`; @@ -672,34 +679,34 @@ create table if not exists linkis_ps_bml_project_resource( `project_id` int(10) NOT NULL, `resource_id` varchar(128) DEFAULT NULL, PRIMARY KEY (`id`) -)ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin ROW_FORMAT=COMPACT; +)ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin ROW_FORMAT=COMPACT; DROP TABLE IF EXISTS `linkis_ps_instance_label`; CREATE TABLE `linkis_ps_instance_label` ( `id` int(20) NOT NULL AUTO_INCREMENT, `label_key` varchar(32) COLLATE utf8_bin NOT NULL COMMENT 'string key', - `label_value` varchar(255) COLLATE utf8_bin NOT NULL COMMENT 'string value', + `label_value` varchar(128) COLLATE utf8_bin NOT NULL COMMENT 'string value', `label_feature` varchar(16) COLLATE utf8_bin NOT NULL COMMENT 'store the feature of label, but it may be redundant', `label_value_size` int(20) NOT NULL COMMENT 'size of key -> value map', `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp', `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp', PRIMARY KEY (`id`), UNIQUE KEY `uniq_lk_lv` 
(`label_key`,`label_value`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_ps_instance_label_value_relation`; CREATE TABLE `linkis_ps_instance_label_value_relation` ( `id` int(20) NOT NULL AUTO_INCREMENT, - `label_value_key` varchar(255) COLLATE utf8_bin NOT NULL COMMENT 'value key', + `label_value_key` varchar(128) COLLATE utf8_bin NOT NULL COMMENT 'value key', `label_value_content` varchar(255) COLLATE utf8_bin DEFAULT NULL COMMENT 'value content', `label_id` int(20) DEFAULT NULL COMMENT 'id reference linkis_ps_instance_label -> id', `update_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp', `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create unix timestamp', PRIMARY KEY (`id`), UNIQUE KEY `uniq_lvk_lid` (`label_value_key`,`label_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_ps_instance_label_relation`; CREATE TABLE `linkis_ps_instance_label_relation` ( @@ -710,7 +717,7 @@ CREATE TABLE `linkis_ps_instance_label_relation` ( `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create unix timestamp', PRIMARY KEY (`id`), UNIQUE KEY `uniq_lid_instance` (`label_id`,`service_instance`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_ps_instance_info`; @@ -722,7 +729,7 @@ CREATE TABLE `linkis_ps_instance_info` ( `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create unix timestamp', PRIMARY KEY (`id`), UNIQUE KEY `uniq_instance` (`instance`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_ps_error_code`; CREATE TABLE `linkis_ps_error_code` ( @@ -732,8 +739,8 @@ CREATE TABLE `linkis_ps_error_code` ( `error_regex` varchar(1024) DEFAULT NULL, `error_type` int(3) DEFAULT 0, PRIMARY KEY (`id`), - UNIQUE INDEX `idx_error_regex` (error_regex(255)) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + UNIQUE INDEX `idx_error_regex` (error_regex(191)) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_cg_manager_service_instance`; CREATE TABLE `linkis_cg_manager_service_instance` ( @@ -748,9 +755,10 @@ CREATE TABLE `linkis_cg_manager_service_instance` ( `create_time` datetime DEFAULT CURRENT_TIMESTAMP, `updator` varchar(32) COLLATE utf8_bin DEFAULT NULL, `creator` varchar(32) COLLATE utf8_bin DEFAULT NULL, + `params` text COLLATE utf8_bin DEFAULT NULL, PRIMARY KEY (`id`), UNIQUE KEY `uniq_instance` (`instance`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_cg_manager_linkis_resources`; CREATE TABLE `linkis_cg_manager_linkis_resources` ( @@ -768,7 +776,7 @@ CREATE TABLE `linkis_cg_manager_linkis_resources` ( `updator` varchar(255) COLLATE utf8_bin DEFAULT NULL, `creator` varchar(255) COLLATE utf8_bin DEFAULT NULL, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_cg_manager_lock`; CREATE TABLE `linkis_cg_manager_lock` ( @@ -778,7 +786,7 @@ CREATE TABLE `linkis_cg_manager_lock` ( `update_time` datetime DEFAULT CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`) -) 
ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_cg_rm_external_resource_provider`; CREATE TABLE `linkis_cg_rm_external_resource_provider` ( @@ -788,7 +796,7 @@ CREATE TABLE `linkis_cg_rm_external_resource_provider` ( `labels` varchar(32) DEFAULT NULL, `config` text NOT NULL, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_cg_manager_engine_em`; CREATE TABLE `linkis_cg_manager_engine_em` ( @@ -798,32 +806,33 @@ CREATE TABLE `linkis_cg_manager_engine_em` ( `update_time` datetime DEFAULT CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_cg_manager_label`; CREATE TABLE `linkis_cg_manager_label` ( `id` int(20) NOT NULL AUTO_INCREMENT, - `label_key` varchar(50) COLLATE utf8_bin NOT NULL, - `label_value` varchar(255) COLLATE utf8_bin NOT NULL, + `label_key` varchar(32) COLLATE utf8_bin NOT NULL, + `label_value` varchar(128) COLLATE utf8_bin NOT NULL, `label_feature` varchar(16) COLLATE utf8_bin NOT NULL, `label_value_size` int(20) NOT NULL, `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE KEY `uniq_lk_lv` (`label_key`,`label_value`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_cg_manager_label_value_relation`; CREATE TABLE `linkis_cg_manager_label_value_relation` ( `id` int(20) NOT NULL AUTO_INCREMENT, - `label_value_key` varchar(255) COLLATE utf8_bin NOT NULL, + `label_value_key` varchar(128) COLLATE utf8_bin NOT NULL, `label_value_content` varchar(255) COLLATE utf8_bin DEFAULT NULL, `label_id` int(20) DEFAULT NULL, `update_time` datetime DEFAULT CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), - UNIQUE KEY `uniq_lvk_lid` (`label_value_key`,`label_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + UNIQUE KEY `uniq_lvk_lid` (`label_value_key`,`label_id`), + UNIQUE KEY `unlid_lvk_lvc` (`label_id`,`label_value_key`,`label_value_content`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_cg_manager_label_resource`; CREATE TABLE `linkis_cg_manager_label_resource` ( @@ -834,16 +843,16 @@ CREATE TABLE `linkis_cg_manager_label_resource` ( `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE KEY `uniq_label_id` (`label_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_cg_ec_resource_info_record`; CREATE TABLE `linkis_cg_ec_resource_info_record` ( `id` INT(20) NOT NULL AUTO_INCREMENT, - `label_value` VARCHAR(255) NOT NULL COMMENT 'ec labels stringValue', + `label_value` VARCHAR(128) NOT NULL COMMENT 'ec labels stringValue', `create_user` VARCHAR(128) NOT NULL COMMENT 'ec create user', `service_instance` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT 'ec instance info', `ecm_instance` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT 'ecm instance info ', - `ticket_id` VARCHAR(100) NOT NULL COMMENT 'ec ticket id', + `ticket_id` VARCHAR(36) NOT NULL COMMENT 'ec ticket id', `status` varchar(50) DEFAULT NULL COMMENT 'EC 
status: Starting,Unlock,Locked,Idle,Busy,Running,ShuttingDown,Failed,Success', `log_dir_suffix` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT 'log path', `request_times` INT(8) COMMENT 'resource request times', @@ -859,8 +868,8 @@ CREATE TABLE `linkis_cg_ec_resource_info_record` ( PRIMARY KEY (`id`), KEY `idx_ticket_id` (`ticket_id`), UNIQUE KEY `uniq_tid_lv` (`ticket_id`,`label_value`), - UNIQUE KEY uniq_sinstance_status_cuser_ctime (service_instance, status, create_user, create_time) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + UNIQUE KEY `uniq_sinstance_status_cuser_ctime` (`service_instance`, `status`, `create_user`, `create_time`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_cg_manager_label_service_instance`; CREATE TABLE `linkis_cg_manager_label_service_instance` ( @@ -871,7 +880,7 @@ CREATE TABLE `linkis_cg_manager_label_service_instance` ( `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), KEY `idx_lid_instance` (`label_id`,`service_instance`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_cg_manager_label_user`; @@ -882,7 +891,7 @@ CREATE TABLE `linkis_cg_manager_label_user` ( `update_time` datetime DEFAULT CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_cg_manager_metrics_history`; @@ -898,7 +907,7 @@ CREATE TABLE `linkis_cg_manager_metrics_history` ( `serviceName` varchar(255) COLLATE utf8_bin DEFAULT NULL, `instance` varchar(255) COLLATE utf8_bin DEFAULT NULL, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_cg_manager_service_instance_metrics`; CREATE TABLE `linkis_cg_manager_service_instance_metrics` ( @@ -909,9 +918,9 @@ CREATE TABLE `linkis_cg_manager_service_instance_metrics` ( `healthy_status` varchar(255) COLLATE utf8_bin DEFAULT NULL, `update_time` datetime DEFAULT CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP, - description varchar(256) CHARSET utf8mb4 COLLATE utf8mb4_bin DEFAULT '', + `description` varchar(256) COLLATE utf8_bin NOT NULL DEFAULT '', PRIMARY KEY (`instance`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; DROP TABLE IF EXISTS `linkis_cg_engine_conn_plugin_bml_resources`; CREATE TABLE `linkis_cg_engine_conn_plugin_bml_resources` ( @@ -926,7 +935,7 @@ CREATE TABLE `linkis_cg_engine_conn_plugin_bml_resources` ( `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'created time', `last_update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'updated time', PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- ---------------------------- -- Table structure for linkis_ps_dm_datasource @@ -940,7 +949,7 @@ CREATE TABLE `linkis_ps_dm_datasource` `datasource_type_id` int(11) NOT NULL, `create_identify` varchar(255) COLLATE utf8_bin DEFAULT NULL, `create_system` varchar(255) COLLATE utf8_bin DEFAULT NULL, - `parameter` varchar(1024) COLLATE utf8_bin NULL DEFAULT NULL, + `parameter` varchar(2048) COLLATE utf8_bin NULL DEFAULT NULL, `create_time` datetime NULL DEFAULT CURRENT_TIMESTAMP, 
`modify_time` datetime NULL DEFAULT CURRENT_TIMESTAMP, `create_user` varchar(255) COLLATE utf8_bin DEFAULT NULL, @@ -951,7 +960,7 @@ CREATE TABLE `linkis_ps_dm_datasource` `published_version_id` int(11) DEFAULT NULL, PRIMARY KEY (`id`), UNIQUE INDEX `uniq_datasource_name` (`datasource_name`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- ---------------------------- -- Table structure for linkis_ps_dm_datasource_env @@ -963,7 +972,7 @@ CREATE TABLE `linkis_ps_dm_datasource_env` `env_name` varchar(32) COLLATE utf8_bin NOT NULL, `env_desc` varchar(255) COLLATE utf8_bin DEFAULT NULL, `datasource_type_id` int(11) NOT NULL, - `parameter` varchar(1024) COLLATE utf8_bin DEFAULT NULL, + `parameter` varchar(2048) COLLATE utf8_bin DEFAULT NULL, `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, `create_user` varchar(255) COLLATE utf8_bin NULL DEFAULT NULL, `modify_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, @@ -971,7 +980,7 @@ CREATE TABLE `linkis_ps_dm_datasource_env` PRIMARY KEY (`id`), UNIQUE KEY `uniq_env_name` (`env_name`), UNIQUE INDEX `uniq_name_dtid` (`env_name`, `datasource_type_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- ---------------------------- @@ -992,7 +1001,7 @@ CREATE TABLE `linkis_ps_dm_datasource_type` `classifier_en` varchar(32) DEFAULT NULL COMMENT 'english classifier', PRIMARY KEY (`id`), UNIQUE INDEX `uniq_name` (`name`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- ---------------------------- -- Table structure for linkis_ps_dm_datasource_type_key @@ -1004,7 +1013,7 @@ CREATE TABLE `linkis_ps_dm_datasource_type_key` `data_source_type_id` int(11) NOT NULL, `key` varchar(32) COLLATE utf8_bin NOT NULL, `name` varchar(32) COLLATE utf8_bin NOT NULL, - `name_en` varchar(32) COLLATE utf8_bin NOT NULL, + `name_en` varchar(32) COLLATE utf8_bin NULL DEFAULT NULL, `default_value` varchar(50) COLLATE utf8_bin NULL DEFAULT NULL, `value_type` varchar(50) COLLATE utf8_bin NOT NULL, `scope` varchar(50) COLLATE utf8_bin NULL DEFAULT NULL, @@ -1019,7 +1028,7 @@ CREATE TABLE `linkis_ps_dm_datasource_type_key` `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE KEY `uniq_dstid_key` (`data_source_type_id`, `key`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- ---------------------------- -- Table structure for linkis_ps_dm_datasource_version -- ---------------------------- @@ -1033,7 +1042,7 @@ CREATE TABLE `linkis_ps_dm_datasource_version` `create_time` datetime(0) NULL DEFAULT CURRENT_TIMESTAMP, `create_user` varchar(255) COLLATE utf8_bin NULL DEFAULT NULL, PRIMARY KEY `uniq_vid_did` (`version_id`, `datasource_id`) USING BTREE -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- ---------------------------- -- Table structure for linkis_mg_gateway_auth_token @@ -1051,7 +1060,7 @@ CREATE TABLE `linkis_mg_gateway_auth_token` ( `update_by` varchar(32), PRIMARY KEY (`id`), UNIQUE KEY `uniq_token_name` (`token_name`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; @@ -1068,10 +1077,10 @@ CREATE TABLE `linkis_cg_tenant_label_config` ( `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, `desc` varchar(100) COLLATE utf8_bin 
NOT NULL, `bussiness_user` varchar(50) COLLATE utf8_bin NOT NULL, - `is_valid` varchar(1) CHARSET utf8mb4 COLLATE utf8mb4_bin DEFAULT 'Y' COMMENT 'is valid', + `is_valid` varchar(1) COLLATE utf8_bin NOT NULL DEFAULT 'Y' COMMENT 'is valid', PRIMARY KEY (`id`), UNIQUE KEY `uniq_user_creator` (`user`,`creator`) -) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- ---------------------------- -- Table structure for linkis_cg_user_ip_config @@ -1088,7 +1097,9 @@ CREATE TABLE `linkis_cg_user_ip_config` ( `bussiness_user` varchar(50) COLLATE utf8_bin NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `uniq_user_creator` (`user`,`creator`) -) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; + + -- ---------------------------- -- Table structure for linkis_org_user @@ -1108,3 +1119,218 @@ CREATE TABLE `linkis_org_user` ( `user_itsm_no` varchar(64) COMMENT 'user itsm no', PRIMARY KEY (`user_name`) ) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4 COLLATE=utf8mb4_bin COMMENT ='user org info'; + + + + + + +-- 商业化 未开源的放在最后面 上面的sql 和开源保持一致 +-- ---------------------------- +-- Table structure for linkis_cg_synckey +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_cg_synckey`; +CREATE TABLE `linkis_cg_synckey` ( + `username` char(32) NOT NULL, + `synckey` char(32) NOT NULL, + `instance` varchar(32) NOT NULL, + `create_time` datetime(3) NOT NULL, + PRIMARY KEY (`username`, `synckey`) +)ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; + + + +-- ---------------------------- +-- Table structure for linkis_et_validator_checkinfo +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_et_validator_checkinfo`; +CREATE TABLE `linkis_et_validator_checkinfo` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `execute_user` varchar(64) COLLATE utf8_bin NOT NULL, + `db_name` varchar(64) COLLATE utf8_bin DEFAULT NULL, + `params` text COLLATE utf8_bin, + `code_type` varchar(32) COLLATE utf8_bin NOT NULL, + `operation_type` varchar(32) COLLATE utf8_bin NOT NULL, + `status` tinyint(4) DEFAULT NULL, + `code` text COLLATE utf8_bin, + `msg` text COLLATE utf8_bin, + `risk_level` varchar(32) COLLATE utf8_bin DEFAULT NULL, + `hit_rules` text COLLATE utf8_bin, + `create_time` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; + +-- ---------------------------- +-- Table structure for linkis_ps_bml_cleaned_resources_version +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_ps_bml_cleaned_resources_version`; +CREATE TABLE `linkis_ps_bml_cleaned_resources_version` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键', + `resource_id` varchar(50) NOT NULL COMMENT '资源id,资源的uuid', + `file_md5` varchar(32) NOT NULL COMMENT '文件的md5摘要', + `version` varchar(20) NOT NULL COMMENT '资源版本(v 加上 五位数字)', + `size` int(10) NOT NULL COMMENT '文件大小', + `start_byte` bigint(20) unsigned NOT NULL DEFAULT '0', + `end_byte` bigint(20) unsigned NOT NULL DEFAULT '0', + `resource` varchar(2000) NOT NULL COMMENT '资源内容(文件信息 包括 路径和文件名)', + `description` varchar(2000) DEFAULT NULL COMMENT '描述', + `start_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '开始时间', + `end_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '结束时间', + `client_ip` varchar(200) NOT NULL COMMENT '客户端ip', + `updator` varchar(50) DEFAULT NULL COMMENT 
'修改者', + `enable_flag` tinyint(1) NOT NULL DEFAULT '1' COMMENT '状态,1:正常,0:冻结', + `old_resource` varchar(2000) NOT NULL COMMENT '旧的路径', + PRIMARY KEY (`id`), + UNIQUE KEY `resource_id_version` (`resource_id`,`version`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; + + +-- ---------------------------- +-- Table structure for linkis_ps_configuration_across_cluster_rule +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_ps_configuration_across_cluster_rule`; +CREATE TABLE `linkis_ps_configuration_across_cluster_rule` ( + id INT AUTO_INCREMENT COMMENT '规则ID,自增主键', + cluster_name char(32) NOT NULL COMMENT '集群名称,不能为空', + creator char(32) NOT NULL COMMENT '创建者,不能为空', + username char(32) NOT NULL COMMENT '用户,不能为空', + create_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间,不能为空', + create_by char(32) NOT NULL COMMENT '创建者,不能为空', + update_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '修改时间,不能为空', + update_by char(32) NOT NULL COMMENT '更新者,不能为空', + rules varchar(512) NOT NULL COMMENT '规则内容,不能为空', + is_valid VARCHAR(2) DEFAULT 'N' COMMENT '是否有效 Y/N', + PRIMARY KEY (id), + UNIQUE KEY idx_creator_username (creator, username) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; + +-- ---------------------------- +-- Table structure for linkis_ps_configuration_template_config_key +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_ps_configuration_template_config_key`; +CREATE TABLE `linkis_ps_configuration_template_config_key` ( + `id` BIGINT(20) NOT NULL AUTO_INCREMENT, + `template_name` VARCHAR(200) NOT NULL COMMENT '配置模板名称 冗余存储', + `template_uuid` VARCHAR(36) NOT NULL COMMENT 'uuid 第三方侧记录的模板id', + `key_id` BIGINT(20) NOT NULL COMMENT 'id of linkis_ps_configuration_config_key', + `config_value` VARCHAR(200) NULL DEFAULT NULL COMMENT '配置值', + `max_value` VARCHAR(50) NULL DEFAULT NULL COMMENT '上限值', + `min_value` VARCHAR(50) NULL DEFAULT NULL COMMENT '下限值(预留)', + `validate_range` VARCHAR(50) NULL DEFAULT NULL COMMENT '校验正则(预留) ', + `is_valid` VARCHAR(2) DEFAULT 'Y' COMMENT '是否有效 预留 Y/N', + `create_by` VARCHAR(50) NOT NULL COMMENT '创建人', + `create_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', + `update_by` VARCHAR(50) NULL DEFAULT NULL COMMENT '更新人', + `update_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'update time', + PRIMARY KEY (`id`), + UNIQUE INDEX `uniq_tid_kid` (`template_uuid`, `key_id`), + UNIQUE INDEX `uniq_tname_kid` (`template_name`, `key_id`) +)ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; + +-- ---------------------------- +-- Table structure for linkis_ps_configuration_key_limit_for_user +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_ps_configuration_key_limit_for_user`; +CREATE TABLE `linkis_ps_configuration_key_limit_for_user` ( + `id` BIGINT(20) NOT NULL AUTO_INCREMENT, + `user_name` VARCHAR(50) NOT NULL COMMENT '用户名', + `combined_label_value` VARCHAR(128) NOT NULL COMMENT '组合标签 combined_userCreator_engineType 如 hadoop-IDE,spark-2.4.3', + `key_id` BIGINT(20) NOT NULL COMMENT 'id of linkis_ps_configuration_config_key', + `config_value` VARCHAR(200) NULL DEFAULT NULL COMMENT '配置值', + `max_value` VARCHAR(50) NULL DEFAULT NULL COMMENT '上限值', + `min_value` VARCHAR(50) NULL DEFAULT NULL COMMENT '下限值(预留)', + `latest_update_template_uuid` VARCHAR(36) NOT NULL COMMENT 'uuid 第三方侧记录的模板id', + `is_valid` VARCHAR(2) DEFAULT 'Y' COMMENT '是否有效 预留 Y/N', + `create_by` VARCHAR(50) NOT NULL COMMENT '创建人', + `create_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', + 
`update_by` VARCHAR(50) NULL DEFAULT NULL COMMENT '更新人', + `update_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'update time', + PRIMARY KEY (`id`), + UNIQUE INDEX `uniq_com_label_kid` (`combined_label_value`, `key_id`) +)ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; + + + +-- ---------------------------- +-- Table structure for linkis_org_user_sync +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_org_user_sync`; +CREATE TABLE `linkis_org_user_sync` ( + `cluster_code` varchar(16) COMMENT '集群', + `user_type` varchar(64) COMMENT '用户类型', + `user_name` varchar(128) COMMENT '授权用户', + `org_id` varchar(16) COMMENT '部门ID', + `org_name` varchar(64) COMMENT '部门名字', + `queue_name` varchar(64) COMMENT '默认资源队列', + `db_name` varchar(64) COMMENT '默认操作数据库', + `interface_user` varchar(64) COMMENT '接口人', + `is_union_analyse` varchar(64) COMMENT '是否联合分析人', + `create_time` varchar(64) COMMENT '用户创建时间', + `user_itsm_no` varchar(64) COMMENT '用户创建单号', + PRIMARY KEY (`user_name`) +) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4 COLLATE=utf8mb4_bin COMMENT ='用户部门统计INC表'; + +-- ---------------------------- +-- Table structure for linkis_cg_tenant_department_config +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_cg_tenant_department_config`; +CREATE TABLE `linkis_cg_tenant_department_config` ( + `id` int(20) NOT NULL AUTO_INCREMENT COMMENT 'ID', + `creator` varchar(50) COLLATE utf8_bin NOT NULL COMMENT '应用', + `department` varchar(64) COLLATE utf8_bin NOT NULL COMMENT '部门名称', + `department_id` varchar(16) COLLATE utf8_bin NOT NULL COMMENT '部门ID', + `tenant_value` varchar(128) COLLATE utf8_bin NOT NULL COMMENT '部门租户标签', + `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', + `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间', + `create_by` varchar(50) COLLATE utf8_bin NOT NULL COMMENT '创建用户', + `is_valid` varchar(1) COLLATE utf8_bin NOT NULL DEFAULT 'Y' COMMENT '是否有效', + PRIMARY KEY (`id`), + UNIQUE KEY `uniq_creator_department` (`creator`,`department`) +) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; + +-- ---------------------------- +-- Table structure for linkis_mg_gateway_whitelist_config +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_mg_gateway_whitelist_config`; +CREATE TABLE `linkis_mg_gateway_whitelist_config` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `allowed_user` varchar(128) COLLATE utf8_bin NOT NULL, + `client_address` varchar(128) COLLATE utf8_bin NOT NULL, + `create_time` datetime DEFAULT NULL, + `update_time` datetime DEFAULT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `address_uniq` (`allowed_user`, `client_address`), + KEY `linkis_mg_gateway_whitelist_config_allowed_user` (`allowed_user`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; + +-- ---------------------------- +-- Table structure for linkis_mg_gateway_whitelist_sensitive_user +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_mg_gateway_whitelist_sensitive_user`; +CREATE TABLE `linkis_mg_gateway_whitelist_sensitive_user` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `sensitive_username` varchar(128) COLLATE utf8_bin NOT NULL, + `create_time` datetime DEFAULT NULL, + `update_time` datetime DEFAULT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `sensitive_username` (`sensitive_username`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; + +-- ---------------------------- +-- Table structure for linkis_ps_python_module_info +-- ---------------------------- +DROP TABLE IF EXISTS 
`linkis_ps_python_module_info`; +CREATE TABLE `linkis_ps_python_module_info` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '自增id', + `name` varchar(255) NOT NULL COMMENT 'python模块名称', + `description` text COMMENT 'python模块描述', + `path` varchar(255) NOT NULL COMMENT 'hdfs路径', + `engine_type` varchar(50) NOT NULL COMMENT '引擎类型,python/spark/all', + `create_user` varchar(50) NOT NULL COMMENT '创建用户', + `update_user` varchar(50) NOT NULL COMMENT '修改用户', + `is_load` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否加载,0-未加载,1-已加载', + `is_expire` tinyint(1) DEFAULT NULL COMMENT '是否过期,0-未过期,1-已过期)', + `create_time` datetime NOT NULL COMMENT '创建时间', + `update_time` datetime NOT NULL COMMENT '修改时间', + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin COMMENT='Python模块包信息表'; \ No newline at end of file diff --git a/linkis-dist/package/db/linkis_dml.sql b/linkis-dist/package/db/linkis_dml.sql index d786b58444..dea1719942 100644 --- a/linkis-dist/package/db/linkis_dml.sql +++ b/linkis-dist/package/db/linkis_dml.sql @@ -28,6 +28,7 @@ SET @TRINO_LABEL="trino-371"; SET @IO_FILE_LABEL="io_file-1.0"; SET @OPENLOOKENG_LABEL="openlookeng-1.5.0"; SET @ELASTICSEARCH_LABEL="elasticsearch-7.6.2"; +SET @NEBULA_LABEL="nebula-3.0.0"; -- 衍生变量: SET @SPARK_ALL=CONCAT('*-*,',@SPARK_LABEL); @@ -67,33 +68,38 @@ SET @IO_FILE_IDE=CONCAT('*-IDE,',@IO_FILE_LABEL); SET @ELASTICSEARCH_ALL=CONCAT('*-*,',@ELASTICSEARCH_LABEL); SET @ELASTICSEARCH_IDE=CONCAT('*-IDE,',@ELASTICSEARCH_LABEL); +SET @NEBULA_ALL=CONCAT('*-*,',@NEBULA_LABEL); +SET @NEBULA_IDE=CONCAT('*-IDE,',@NEBULA_LABEL); + -- Global Settings INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue', 'yarn队列名', 'yarn队列名', 'default', 'None', NULL, '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.instance.max', '取值范围:1-128,单位:个', '队列实例最大个数', '128', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.cores.max', '取值范围:1-500,单位:个', '队列CPU使用上限', '500', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|500)$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.memory.max', '取值范围:1-1000,单位:G', '队列内存使用上限', '1000G', 'Regex', '^([1-9]\\d{0,2}|1000)(G|g)$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '全局各个引擎内存使用上限', '100G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '全局各个引擎核心个数上限', '128', 'Regex', '^(?:[1-9]\\d?|[1][0-2][0-8])$', '0', 
'0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '全局各个引擎最大并发数', '20', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.instance.max', '取值范围:1-128,单位:个', '队列实例最大个数', '30', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.cores.max', '取值范围:1-4000,单位:个', '队列CPU使用上限', '150', 'Regex', '^(?:[1-9]\\d{0,2}|[1-3]\\d{3}|4000)$', '0', '0', '1', '队列资源'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.memory.max', '取值范围:1-10000,单位:G', '队列内存使用上限', '300G', 'Regex', '^(?:[1-9]\\d{0,3}|10000)(G|g)$', '0', '0', '1', '队列资源'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '全局各个引擎内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '全局各个引擎核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1][0-2][0-8])$', '0', '0', '1', '队列资源'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '全局各个引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`, `boundary_type`, `en_description`, `en_name`, `en_treeName`, `template_required`) VALUES ('linkis.entrance.creator.job.concurrency.limit', 'Creator级别限制,范围:1-10000,单位:个', 'Creator最大并发数', '10000', 'NumInterval', '[1,10000]', '', 0, 1, 1, '队列资源', 3, 'creator maximum task limit', 'creator maximum task limit', 'QueueResources', '1'); -- spark -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'spark引擎最大并发数', '20', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.instances', '取值范围:1-40,单位:个', 'spark执行器实例最大并发数', '1', 'NumInterval', '[1,40]', '0', '0', '2', 'spark资源设置', 'spark'); +INSERT INTO 
`linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`, `template_required`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'spark引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'spark', '1'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`, `template_required`) VALUES ('spark.executor.instances', '取值范围:1-40,单位:个', 'spark执行器实例最大并发数', '1', 'NumInterval', '[1,40]', '0', '0', '2', 'spark资源设置', 'spark', '1'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.cores', '取值范围:1-8,单位:个', 'spark执行器核心个数', '1', 'NumInterval', '[1,8]', '0', '0', '1','spark资源设置', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.memory', '取值范围:1-15,单位:G', 'spark执行器内存大小', '1g', 'Regex', '^([1-9]|1[0-5])(G|g)$', '0', '0', '3', 'spark资源设置', 'spark'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`, `template_required`) VALUES ('spark.executor.memory', '取值范围:1-28,单位:G', 'spark执行器内存大小', '1g', 'Regex', '^([1-9]|1[0-9]|2[0-8])(G|g)$', '0', '0', '3', 'spark资源设置', 'spark', '1'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.cores', '取值范围:只能取1,单位:个', 'spark驱动器核心个数', '1', 'NumInterval', '[1,1]', '0', '1', '1', 'spark资源设置','spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.memory', '取值范围:1-15,单位:G', 'spark驱动器内存大小','1g', 'Regex', '^([1-9]|1[0-5])(G|g)$', '0', '0', '1', 'spark资源设置', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.max.free.time', '取值范围:3m,15m,30m,1h,2h', '引擎空闲退出时间','1h', 'OFT', '[\"1h\",\"2h\",\"30m\",\"15m\",\"3m\"]', '0', '0', '1', 'spark引擎设置', 'spark'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`, `template_required`) VALUES ('spark.driver.memory', '取值范围:1-15,单位:G', 'spark驱动器内存大小','1g', 'Regex', '^([1-9]|1[0-5])(G|g)$', '0', '0', '1', 'spark资源设置', 'spark', '1'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.max.free.time', '取值范围:3m,15m,30m,1h,2h,6h,12h', '引擎空闲退出时间','1h', 'OFT', 
'[\"1h\",\"2h\",\"6h\",\"12h\",\"30m\",\"15m\",\"3m\"]', '0', '0', '1', 'spark引擎设置', 'spark'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'spark引擎设置', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`, `boundary_type`, `en_treeName`, `en_description`, `en_name`) VALUES ('spark.conf', '多个参数使用分号[;]分隔 例如spark.shuffle.spill=true;', 'spark自定义配置参数',null, 'None', NULL, 'spark',0, 1, 1,'spark资源设置', 0, 'Spark Resource Settings','Multiple parameters are separated by semicolons [;] For example, spark.sql.shuffle.partitions=10;', 'Spark Custom Configuration Parameters'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`, `boundary_type`, `en_treeName`, `en_description`, `en_name`) VALUES ('spark.conf', '多个参数使用分号[;]分隔 例如spark.shuffle.compress=true;', 'spark自定义配置参数',null, 'None', NULL, 'spark',0, 1, 1,'spark资源设置', 0, 'Spark Resource Settings','Multiple parameters are separated by semicolons [;] For example, spark.shuffle.compress=true;', 'Spark Custom Configuration Parameters'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`, `boundary_type`, `en_treeName`, `en_description`, `en_name`) VALUES ('spark.locality.wait', '范围:0-3,单位:秒', '任务调度本地等待时间', '3s', 'OFT', '[\"0s\",\"1s\",\"2s\",\"3s\"]', 'spark', 0, 1, 1, 'spark资源设置', 0, 'Spark Resource Settings', 'Range: 0-3, Unit: second', 'Task Scheduling Local Waiting Time'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`, `boundary_type`, `en_treeName`, `en_description`, `en_name`) VALUES ('spark.memory.fraction', '范围:0.4,0.5,0.6,单位:百分比', '执行内存和存储内存的百分比', '0.6', 'OFT', '[\"0.4\",\"0.5\",\"0.6\"]', 'spark', 0, 1, 1, 'spark资源设置', 0, 'Spark Resource Settings', 'Range: 0.4, 0.5, 0.6, in percentage', 'Percentage Of Execution Memory And Storage Memory'); -- hive -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'hive引擎最大并发数', '20', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'hive'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.java.driver.memory', '取值范围:1-10,单位:G', 'hive引擎初始化内存大小','1g', 'Regex', '^([1-9]|10)(G|g)$', '0', '0', '1', 'hive引擎设置', 'hive'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`, `template_required`) VALUES 
('wds.linkis.rm.instance', '范围:1-20,单位:个', 'hive引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'hive', '1'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`, `template_required`) VALUES ('wds.linkis.engineconn.java.driver.memory', '取值范围:1-10,单位:G', 'hive引擎初始化内存大小','1g', 'Regex', '^([1-9]|10)(G|g)$', '0', '0', '1', 'hive引擎设置', 'hive', '1'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('hive.client.java.opts', 'hive客户端进程参数', 'hive引擎启动时jvm参数','', 'None', NULL, '1', '1', '1', 'hive引擎设置', 'hive'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('mapred.reduce.tasks', '范围:-1-10000,单位:个', 'reduce数', '-1', 'NumInterval', '[-1,10000]', '0', '1', '1', 'hive资源设置', 'hive'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.max.free.time', '取值范围:3m,15m,30m,1h,2h', '引擎空闲退出时间','1h', 'OFT', '[\"1h\",\"2h\",\"30m\",\"15m\",\"3m\"]', '0', '0', '1', 'hive引擎设置', 'hive'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`,`en_description`, `en_name`, `en_treeName`, `template_required`) VALUES ("mapreduce.job.running.reduce.limit", '范围:10-999,单位:个', 'hive引擎reduce限制', '999', 'NumInterval', '[10,999]', '0', '1', '1', 'MapReduce设置', 'hive','Value Range: 10-999, Unit: Piece', 'Number Limit Of MapReduce Job Running Reduce', 'MapReduce Settings', '1'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`,`en_description`, `en_name`, `en_treeName`, `template_required`) VALUES ('mapreduce.job.reduce.slowstart.completedmaps', '取值范围:0-1', 'Map任务数与总Map任务数之间的比例','0.05', 'Regex', '^(0(\\.\\d{1,2})?|1(\\.0{1,2})?)$', '0', '0', '1', 'hive引擎设置', 'hive', 'Value Range: 0-1', 'The Ratio Between The Number Of Map Tasks And The Total Number Of Map Tasks', 'Hive Engine Settings', '1'); -- python INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', 'python驱动器内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源', 'python'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', 'python驱动器核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源', 'python'); @@ -104,7 +110,7 @@ INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, -- pipeline INSERT INTO `linkis_ps_configuration_config_key` (`key`, 
`description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.mold', '取值范围:csv或excel', '结果集导出类型','csv', 'OFT', '[\"csv\",\"excel\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.field.split', '取值范围:,或\\t', 'csv分隔符',',', 'OFT', '[\",\",\"\\\\t\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.field.split', '取值范围:,或\\t或;或|', 'csv分隔符',',', 'OFT', '[\",\",\"\\\\t\",\"\\\\;\",\"\\\\|\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.charset', '取值范围:utf-8或gbk', '结果集导出字符集','gbk', 'OFT', '[\"utf-8\",\"gbk\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.isoverwrite', '取值范围:true或false', '是否覆写','true', 'OFT', '[\"true\",\"false\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-3,单位:个', 'pipeline引擎最大并发数','3', 'NumInterval', '[1,3]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); @@ -168,8 +174,20 @@ INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.trino.ssl.truststore.type', 'Trino服务器SSL truststore类型', 'truststore类型', 'null', 'None', '', 'trino', 0, 0, 1, '数据源配置'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.trino.ssl.truststore.password', 'Trino服务器SSL truststore密码', 'truststore密码', 'null', 'None', '', 'trino', 0, 0, 1, '数据源配置'); +-- nebula +INSERT INTO `linkis_ps_configuration_config_key` (`key`,description,name,default_value,validate_type,validate_range,engine_conn_type,is_hidden,is_advanced,`level`,treeName,boundary_type,en_treeName,en_description,en_name,template_required) VALUES +('linkis.nebula.host','Nebula 连接地址','Nebula 连接地址',NULL,'None',NULL,'nebula',0,0,1,'Necula引擎设置',0,'Nebula Engine Settings','Nebula Host','Nebula Host',0); +INSERT INTO `linkis_ps_configuration_config_key` (`key`,description,name,default_value,validate_type,validate_range,engine_conn_type,is_hidden,is_advanced,`level`,treeName,boundary_type,en_treeName,en_description,en_name,template_required) VALUES +('linkis.nebula.port','Nebula 连接端口','Nebula 
连接端口',NULL,'None',NULL,'nebula',0,0,1,'Necula引擎设置',0,'Nebula Engine Settings','Nebula Port','Nebula Port',0); +INSERT INTO `linkis_ps_configuration_config_key` (`key`,description,name,default_value,validate_type,validate_range,engine_conn_type,is_hidden,is_advanced,`level`,treeName,boundary_type,en_treeName,en_description,en_name,template_required) VALUES +('linkis.nebula.username','Nebula 连接用户名','Nebula 连接用户名',NULL,'None',NULL,'nebula',0,0,1,'Necula引擎设置',0,'Nebula Engine Settings','Nebula Username','Nebula Username',0); +INSERT INTO `linkis_ps_configuration_config_key` (`key`,description,name,default_value,validate_type,validate_range,engine_conn_type,is_hidden,is_advanced,`level`,treeName,boundary_type,en_treeName,en_description,en_name,template_required) VALUES +('linkis.nebula.password','Nebula 连接密码','Nebula 连接密码',NULL,'None',NULL,'nebula',0,0,1,'Necula引擎设置',0,'Nebula Engine Settings','Nebula Password','Nebula Password',0); +INSERT INTO `linkis_ps_configuration_config_key` (`key`,description,name,default_value,validate_type,validate_range,engine_conn_type,is_hidden,is_advanced,`level`,treeName,boundary_type,en_treeName,en_description,en_name,template_required) VALUES +('linkis.nebula.space', 'Nebula 图空间', 'Nebula 图空间', NULL, 'None', NULL, 'nebula', 0, 0, 1, 'Necula引擎设置', 0, 'Nebula Engine Settings', 'Nebula Space', 'Nebula Space', 0); + -- Configuration first level directory -insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-GlobalSettings,*-*', 'OPTIONAL', 2, now(), now()); +insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-全局设置,*-*', 'OPTIONAL', 2, now(), now()); insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-IDE,*-*', 'OPTIONAL', 2, now(), now()); insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-Visualis,*-*', 'OPTIONAL', 2, now(), now()); insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-nodeexecution,*-*', 'OPTIONAL', 2, now(), now()); @@ -186,12 +204,13 @@ insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_featur insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType', @ELASTICSEARCH_ALL, 'OPTIONAL', 2, now(), now()); insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType', @PRESTO_ALL, 'OPTIONAL', 2, now(), now()); insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType', @TRINO_ALL, 'OPTIONAL', 2, now(), now()); - +insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType', @NEBULA_IDE,'OPTIONAL',2,now(),now()); +insert into `linkis_cg_manager_label` 
(`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType', @NEBULA_ALL,'OPTIONAL',2,now(),now()); -- Custom correlation engine (e.g. spark) and configKey value -- Global Settings insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) (select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config -INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type is null and label.label_value = "*-*,*-*"); +INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = '' and label.label_value = "*-*,*-*"); -- spark(Here choose to associate all spark type Key values with spark) insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) @@ -244,6 +263,11 @@ insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `eng (select config.id as config_key_id, label.id AS engine_type_label_id FROM linkis_ps_configuration_config_key config INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'trino' and label_value = @TRINO_ALL); +-- nebula-3.0.0 +insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) +(select config.id as config_key_id, label.id AS engine_type_label_id FROM linkis_ps_configuration_config_key config +INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'nebula' and label_value = @NEBULA_ALL); + -- If you need to customize the parameters of the new engine, the following configuration does not need to write SQL initialization -- Just write the SQL above, and then add applications and engines to the management console to automatically initialize the configuration @@ -362,21 +386,27 @@ insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_val INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = @TRINO_ALL); +-- nebula default configuration +insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) +(select relation.config_key_id AS config_key_id, '' AS config_value, relation.engine_type_label_id AS config_label_id FROM `linkis_ps_configuration_key_engine_relation` relation +INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = @NEBULA_ALL); + insert into `linkis_cg_rm_external_resource_provider`(`id`,`resource_type`,`name`,`labels`,`config`) values (1,'Yarn','default',NULL,'{"rmWebAddress":"@YARN_RESTFUL_URL","hadoopVersion":"@HADOOP_VERSION","authorEnable":@YARN_AUTH_ENABLE,"user":"@YARN_AUTH_USER","pwd":"@YARN_AUTH_PWD","kerberosEnable":@YARN_KERBEROS_ENABLE,"principalName":"@YARN_PRINCIPAL_NAME","keytabPath":"@YARN_KEYTAB_PATH","krb5Path":"@YARN_KRB5_PATH"}'); -- errorcode -- 01 linkis server INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01001','您的任务没有路由到后台ECM,请联系管理员','The em of labels',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01002','Linkis服务负载过高,请联系管理员扩容','Unexpected end of file from server',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01003','Linkis服务负载过高,请联系管理员扩容','failed to ask linkis Manager Can be retried SocketTimeoutException',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES 
('01002','任务运行内存超过设置内存限制,导致Linkis服务负载过高,请在管理台调整Driver内存或联系管理员扩容','Unexpected end of file from server',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01003','任务运行内存超过设置内存限制,导致Linkis服务负载过高,请在管理台调整Driver内存或联系管理员扩容','failed to ask linkis Manager Can be retried SocketTimeoutException',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01004','引擎在启动时被Kill,请联系管理员',' [0-9]+ Killed',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01005','请求Yarn获取队列信息重试2次仍失败,请联系管理员','Failed to request external resourceClassCastException',0); - +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01006','没有健康可用的ecm节点,可能任务量大,导致节点资源处于不健康状态,尝试kill空闲引擎释放资源','There are corresponding ECM tenant labels',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01007','文件编码格式异常,请联系管理人员处理','UnicodeEncodeError.*characters',0); -- 11 linkis resource 12 user resource 13 user task resouce INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01101','ECM资源不足,请联系管理员扩容','ECM resources are insufficient',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01102','ECM 内存资源不足,请联系管理员扩容','ECM memory resources are insufficient',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01102','ECM 内存资源不足,可以设置更低的驱动内存','ECM memory resources are insufficient',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01103','ECM CPU资源不足,请联系管理员扩容','ECM CPU resources are insufficient',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01104','ECM 实例资源不足,请联系管理员扩容','ECM Insufficient number of instances',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01105','机器内存不足,请联系管理员扩容','Cannot allocate memory',0); @@ -398,21 +428,24 @@ INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13001','Java进程内存溢出,建议优化脚本内容','OutOfMemoryError',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13002','使用资源过大,请调优sql或者加大资源','Container killed by YARN for exceeding memory limits',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13003','使用资源过大,请调优sql或者加大资源','read record exception',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13004','引擎意外退出,可能是使用资源过大导致','failed because the engine quitted unexpectedly',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13005','Spark app应用退出,可能是复杂任务导致','Spark application has already stopped',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13006','Spark context退出,可能是复杂任务导致','Spark application sc has already stopped',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13007','Pyspark子进程意外退出,可能是复杂任务导致','Pyspark process has stopped',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13002','任务运行内存超过设置内存限制,请在管理台增加executor内存或在提交任务时通过spark.executor.memory或spark.executor.memoryOverhead调整内存','Container killed by YARN for exceeding memory limits',0); +INSERT INTO linkis_ps_error_code 
(error_code,error_desc,error_regex,error_type) VALUES ('13003','任务运行内存超过设置内存限制,请在管理台增加executor内存或调优sql后执行','read record exception',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13004','任务运行内存超过设置内存限制,导致引擎意外退出,请在管理台增加executor内存或在提交任务时通过spark.executor.memory或spark.executor.memoryOverhead调整内存','failed because the engine quitted unexpectedly',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13005','任务运行内存超过设置内存限制,导致Spark app应用退出,请在管理台增加driver内存或在提交任务时通过spark.driver.memory调整内存','Spark application has already stopped',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13006','任务运行内存超过设置内存限制,导致Spark context应用退出,请在管理台增加driver内存或在提交任务时通过spark.driver.memory调整内存','Spark application sc has already stopped',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13007','任务运行内存超过设置内存限制,导致Pyspark子进程退出,请在管理台增加executor内存或在提交任务时通过spark.executor.memory或spark.executor.memoryOverhead调整内存','Pyspark process has stopped',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13008','任务产生的序列化结果总大小超过了配置的spark.driver.maxResultSize限制。请检查您的任务,看看是否有可能减小任务产生的结果大小,或则可以考虑压缩或合并结果,以减少传输的数据量','is bigger than spark.driver.maxResultSize',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13009','您的任务因为引擎退出(退出可能是引擎进程OOM或者主动kill引擎)导致失败','ERROR EC exits unexpectedly and actively kills the task',0); - +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13010','任务运行内存超过设置内存限制,请在管理台增加executor内存或在提交任务时通过spark.executor.memory或spark.executor.memoryOverhead调整内存','Container exited with a non-zero exit code',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13011','广播表过大导致driver内存溢出,请在执行sql前增加参数后重试:set spark.sql.autoBroadcastJoinThreshold=-1;','dataFrame to local exception',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13012','driver内存不足,请增加driver内存后重试','Failed to allocate a page (\\S+.*\\)), try again.',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13013','使用spark默认变量sc导致后续代码执行失败','sc.setJobGroup(\\S+.*\\))',0); -- 21 cluster Authority 22 db Authority INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21001','会话创建失败,用户%s不能提交应用到队列:%s,请联系提供队列给您的人员','User (\\S+) cannot submit applications to queue ([A-Za-z._0-9]+)',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21002','创建Python解释器失败,请联系管理员','initialize python executor failed',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21003','创建单机Python解释器失败,请联系管理员','PythonSession process cannot be initialized',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22001','%s无权限访问,请申请开通数据表权限,请联系您的数据管理人员','Permission denied:\\s*user=[a-zA-Z0-9_]+,\\s*access=[A-Z]+\\s*,\\s*inode="([a-zA-Z0-9/_\\.]+)"',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22001','%s无权限访问,请申请开通数据表权限,请联系您的数据管理人员','Permission denied:\\s*user=[a-zA-Z0-9_]+[,,]\\s*access=[a-zA-Z0-9_]+\\s*[,,]\\s*inode="([a-zA-Z0-9/_\\.]+)"',0); -- INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22002','您可能没有相关权限','Permission denied',0); INSERT INTO linkis_ps_error_code 
(error_code,error_desc,error_regex,error_type) VALUES ('22003','所查库表无权限','Authorization failed:No privilege',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22004','用户%s在机器不存在,请确认是否申请了相关权限','user (\\S+) does not exist',0); @@ -420,12 +453,15 @@ INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22006','用户在机器不存在,请确认是否申请了相关权限','at com.sun.security.auth.UnixPrincipal',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22007','用户在机器不存在,请确认是否申请了相关权限','LoginException: java.lang.NullPointerException: invalid null input: name',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22008','用户在机器不存在,请确认是否申请了相关权限','User not known to the underlying authentication module',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22009','用户组不存在','FileNotFoundException: /tmp/?',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22010','用户组不存在','error looking up the name of group',0); -- 30 Space exceeded 31 user operation INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('30001','库超过限制','is exceeded',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('31001','用户主动kill任务','is killed by user',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('31002','您提交的EngineTypeLabel没有对应的引擎版本','EngineConnPluginNotFoundException',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('30003','用户Token下发失败,请确认用户初始化是否成功。可联系BDP Hive运维处理','Auth failed for User',0); -- 41 not exist 44 sql 43 python 44 shell 45 scala 46 importExport INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('41001','数据库%s不存在,请检查引用的数据库是否有误','Database ''([a-zA-Z_0-9]+)'' not found',0); @@ -455,14 +491,14 @@ INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42006','表%s在数据库%s中已经存在,请删除相应表后重试','Table or view ''(\\S+)'' already exists in database ''(\\S+)''',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42006','表%s在数据库中已经存在,请删除相应表后重试','Table (\\S+) already exists',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42006','表%s在数据库中已经存在,请删除相应表后重试','Table already exists',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42006','表%s在数据库中已经存在,请删除相应表后重试','AnalysisException: (S+) already exists',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42006','表%s在数据库中已经存在,请删除相应表后重试','AnalysisException: (\\S+) already exists',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42007','插入目标表字段数量不匹配,请检查代码!','requires that the data to be inserted have the same number of columns as the target table',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42008','数据类型不匹配,请检查代码!','due to data type mismatch: differing types in',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42009','字段%s引用有误,请检查字段是否存在!','Invalid column reference (S+)',0); -INSERT INTO 
linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42010','字段%s提取数据失败','Can''t extract value from (S+): need',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42009','字段%s引用有误,请检查字段是否存在!','Invalid column reference (\\S+)',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42010','字段%s提取数据失败','Can''t extract value from (\\S+): need',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42011','括号或者关键字不匹配,请检查代码!','mismatched input ''(\\S+)'' expecting',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42012','group by 位置2不在select列表中,请检查代码!','GROUP BY position (S+) is not in select list',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42013','字段提取数据失败请检查字段类型','Can''t extract value from (S+): need struct type but got string',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42012','group by 位置2不在select列表中,请检查代码!','GROUP BY position (\\S+) is not in select list',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42013','字段提取数据失败请检查字段类型','Can''t extract value from (\\S+): need struct type but got string',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42014','插入数据未指定目标表字段%s,请检查代码!','Cannot insert into target table because column number/types are different ''(S+)''',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42015','表别名%s错误,请检查代码!','Invalid table alias ''(\\S+)''',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42016','UDF函数未指定参数,请检查代码!','UDFArgumentException Argument expected',0); @@ -494,30 +530,46 @@ INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43039','语法问题,请检查脚本','Distinct window functions are not supported',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43040','查询一定要指定数据源和库信息','Schema must be specified when session schema is not set',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43041','用户UDF函数 %s 加载失败,请检查后再执行','Invalid function (\\S+)',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43042','插入数据表动态分区数超过配置值 %s ,请优化sql或调整配置hive.exec.max.dynamic.partitions后重试','Maximum was set to (\\S+) partitions per node',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43043','执行任务消耗内存超过限制,hive任务请修改map或reduce的内存,spark任务请修改executor端内存','Error:java heap space',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43044','表 %s 分区数超过阈值 %s,需要分批删除分区,再删除表','the partitions of table (\\S+) exceeds threshold (\\S+)',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43045','查询/操作的表 %s 分区数为 %s ,超过阈值 %s ,需要限制查询/操作的分区数量','Number of partitions scanned \\(=(\\d+)\\) on table (\\S+) exceeds limit \\(=(\\d+)\\)',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43046','动态分区一次性写入分区数 %s ,超过阈值 %s,请减少一次性写入的分区数','Number of dynamic partitions created is (\\S+), which is more than (\\S+)',0); +INSERT INTO linkis_ps_error_code 
(error_code,error_desc,error_regex,error_type) VALUES ('43047','动态分区一次性写入分区数 %s ,超过阈值 %s,请减少一次性写入的分区数','Maximum was set to (\\S+) partitions per node, number of dynamic partitions on this node: (\\S+)',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43048','参数引用错误,请检查参数 %s 是否正常引用','UnboundLocalError.*local variable (\\S+) referenced before assignment',0); -- 43 python INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43001','代码中存在NoneType空类型变量,请检查代码','''NoneType'' object',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43002','数组越界','IndexError:List index out of range',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43003','您的代码有语法错误,请您修改代码之后执行','SyntaxError',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43004','python代码变量%s未定义','name ''(S+)'' is not defined',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43005','python udf %s 未定义','Undefined function:s+''(S+)''',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43006','python执行不能将%s和%s两种类型进行连接','cannot concatenate ''(S+)'' and ''(S+)''',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43007','pyspark执行失败,可能是语法错误或stage失败','Py4JJavaError: An error occurred',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43004','python代码变量%s未定义','name ''(\\S+)'' is not defined',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43005','python udf %s 未定义','Undefined function:s+''(\\S+)''',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43006','python执行不能将%s和%s两种类型进行连接','cannot concatenate ''(\\S+)'' and ''(\\S+)''',0); +-- INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43007','pyspark执行失败,可能是语法错误或stage失败','Py4JJavaError: An error occurred',0); +-- INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43008','python代码缩进对齐有误','unexpected indent',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43009','python代码缩进有误','unexpected indent',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43010','python代码反斜杠后面必须换行','unexpected character after line',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43011','导出Excel表超过最大限制1048575','Invalid row number',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43012','python save as table未指定格式,默认用parquet保存,hive查询报错','parquet.io.ParquetDecodingException',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43013','索引使用错误','IndexError',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43014','sql语法有问题','raise ParseException',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43015','python代码变量%s未定义','ImportError: ''(\\S+)''',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43015','当前节点需要的CS表解析失败,请检查当前CSID对应的CS表是否存在','Cannot parse cs table for node',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) 
VALUES ('43016','模块 %s 没有属性 %s ,请确认代码引用是否正常','AttributeError: \'(\\S+)\' object has no attribute \'(\\S+)\'',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43017','存在参数无效或拼写错误,请确认 %s 参数正确性','KeyError: (\\(.+\\))',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43017','存在参数无效或拼写错误,请确认 %s 参数正确性','KeyError: (.*)',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43018','文件未找到,请确认该路径( %s )是否存在','FileNotFoundError.*No such file or directory\\:\\s\'(\\S+)\'',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43019','执行表在元数据库中存在meta缓存,meta信息与缓存不一致导致,请增加参数(--conf spark.sql.hive.convertMetastoreOrc=false)后重试','Unable to alter table.*Table is not allowed to be altered',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43020','Python 进程已停止,查询失败!','python process has stopped',0); + -- 46 importExport INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('46001','找不到导入文件地址:%s','java.io.FileNotFoundException: (\\S+) \\(No such file or directory\\)',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('46002','导出为excel时临时文件目录权限异常','java.io.IOException: Permission denied(.+)at org.apache.poi.xssf.streaming.SXSSFWorkbook.createAndRegisterSXSSFSheet',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('46003','导出文件时无法创建目录:%s','java.io.IOException: Mkdirs failed to create (\\S+) (.+)',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('46004','导入模块错误,系统没有%s模块,请联系运维人员安装','ImportError: No module named (S+)',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('46004','导入模块错误,系统没有%s模块,请联系运维人员安装','ImportError: No module named (\\S+)',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('46005','导出语句错误,请检查路径或命名','Illegal out script',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('46006','可能是并发访问同一个HDFS文件,导致Filesystem closed问题,尝试重试','java.io.IOException: Filesystem closed\\n\\s+(at org.apache.hadoop.hdfs.DFSClient.checkOpen)',0); +-- 47 tuning +-- INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('47001','诊断任务异常:%s,详细异常: %s','Tuning-Code: (\\S+), Tuning-Desc: (.+)',0); + -- 91 wtss INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('91001','找不到变量值,请确认您是否设置相关变量','not find variable substitution for',0); @@ -584,6 +636,16 @@ INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'envId', '集群环境(Cluster env)', 'Cluster env', NULL, 'SELECT', NULL, 1, '集群环境(Cluster env)', 'Cluster env', NULL, NULL, NULL, @data_source, now(), now()); +select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'mongodb'; +SET @data_source=CONCAT('/data-source-manager/env-list/all/type/',@data_source_type_id); +INSERT INTO `linkis_ps_dm_datasource_type_key` + (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, 
`ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) +VALUES (@data_source_type_id, 'username', '用户名', NULL, 'TEXT', NULL, 1, '用户名', '^[0-9A-Za-z_-]+$', NULL, '', NULL, now(), now()), + (@data_source_type_id, 'password', '密码', NULL, 'PASSWORD', NULL, 1, '密码', '', NULL, '', NULL, now(), now()), + (@data_source_type_id, 'database', '默认库', NULL, 'TEXT', NULL, 1, '默认库', '^[0-9A-Za-z_-]+$', NULL, '', NULL, now(), now()), + (@data_source_type_id, 'host', 'Host', NULL, 'TEXT', NULL, 1, 'mongodb Host ', NULL, NULL, NULL, NULL, now(), now()), + (@data_source_type_id, 'port', '端口', NULL, 'TEXT', NULL, 1, '端口', NULL, NULL, NULL, NULL, now(), now()), + (@data_source_type_id, 'params', '连接参数', NULL, 'TEXT', NULL, 0, '输入JSON格式: {"param":"value"}', NULL, NULL, NULL, NULL, now(), now()); select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'elasticsearch'; INSERT INTO `linkis_ps_dm_datasource_type_key` diff --git a/linkis-dist/package/db/upgrade/1.6.0_schema/mysql/linkis_ddl.sql b/linkis-dist/package/db/upgrade/1.6.0_schema/mysql/linkis_ddl.sql index 562ee9ad4d..fd8ead6289 100644 --- a/linkis-dist/package/db/upgrade/1.6.0_schema/mysql/linkis_ddl.sql +++ b/linkis-dist/package/db/upgrade/1.6.0_schema/mysql/linkis_ddl.sql @@ -27,7 +27,8 @@ ALTER TABLE `linkis_cg_ec_resource_info_record` MODIFY COLUMN `metrics` text CHA ALTER TABLE `linkis_ps_configuration_config_key` CHANGE COLUMN `validate_range` `validate_range` VARCHAR(150) NULL DEFAULT NULL COMMENT 'Validate range' COLLATE 'utf8_bin' AFTER `validate_type`; ALTER TABLE linkis_cg_tenant_label_config ADD COLUMN is_valid varchar(1) CHARSET utf8mb4 COLLATE utf8mb4_bin DEFAULT 'Y' COMMENT '是否有效'; - +ALTER TABLE linkis_ps_configuration_across_cluster_rule modify COLUMN rules varchar(512) CHARSET utf8mb4 COLLATE utf8mb4_bin; +ALTER TABLE linkis_cg_manager_label_value_relation ADD CONSTRAINT unlid_lvk_lvc UNIQUE (label_id,label_value_key,label_value_content); -- ---------------------------- -- Table structure for linkis_org_user @@ -48,5 +49,83 @@ CREATE TABLE `linkis_org_user` ( PRIMARY KEY (`user_name`) ) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4 COLLATE=utf8mb4_bin COMMENT ='user org info'; +DROP TABLE IF EXISTS `linkis_ps_job_history_detail`; +DROP TABLE IF EXISTS `linkis_mg_gateway_whitelist_config`; + +-- ---------------------------- +-- Table structure for linkis_cg_tenant_department_config +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_cg_tenant_department_config`; +CREATE TABLE `linkis_cg_tenant_department_config` ( + `id` int(20) NOT NULL AUTO_INCREMENT COMMENT 'ID', + `creator` varchar(50) COLLATE utf8_bin NOT NULL COMMENT '应用', + `department` varchar(64) COLLATE utf8_bin NOT NULL COMMENT '部门名称', + `department_id` varchar(16) COLLATE utf8_bin NOT NULL COMMENT '部门ID', + `tenant_value` varchar(128) COLLATE utf8_bin NOT NULL COMMENT '部门租户标签', + `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', + `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间', + `create_by` varchar(50) COLLATE utf8_bin NOT NULL COMMENT '创建用户', + `is_valid` varchar(1) COLLATE utf8_bin NOT NULL DEFAULT 'Y' COMMENT '是否有效', + PRIMARY KEY (`id`), + UNIQUE KEY `uniq_creator_department` (`creator`,`department`) +) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; + +DROP TABLE IF EXISTS `linkis_org_user_sync`; +CREATE TABLE `linkis_org_user_sync` ( + `cluster_code` varchar(16) COMMENT '集群', + `user_type` varchar(64) COMMENT '用户类型', + `user_name` varchar(128) 
COMMENT '授权用户', + `org_id` varchar(16) COMMENT '部门ID', + `org_name` varchar(64) COMMENT '部门名字', + `queue_name` varchar(64) COMMENT '默认资源队列', + `db_name` varchar(64) COMMENT '默认操作数据库', + `interface_user` varchar(64) COMMENT '接口人', + `is_union_analyse` varchar(64) COMMENT '是否联合分析人', + `create_time` varchar(64) COMMENT '用户创建时间', + `user_itsm_no` varchar(64) COMMENT '用户创建单号', + PRIMARY KEY (`user_name`) +) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4 COLLATE=utf8mb4_bin COMMENT ='用户部门统计INC表'; + +DROP TABLE IF EXISTS `linkis_mg_gateway_whitelist_config`; +CREATE TABLE `linkis_mg_gateway_whitelist_config` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `allowed_user` varchar(128) COLLATE utf8_bin NOT NULL, + `client_address` varchar(128) COLLATE utf8_bin NOT NULL, + `create_time` datetime DEFAULT NULL, + `update_time` datetime DEFAULT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `address_uniq` (`allowed_user`, `client_address`), + KEY `linkis_mg_gateway_whitelist_config_allowed_user` (`allowed_user`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; + +DROP TABLE IF EXISTS `linkis_mg_gateway_whitelist_sensitive_user`; +CREATE TABLE `linkis_mg_gateway_whitelist_sensitive_user` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `sensitive_username` varchar(128) COLLATE utf8_bin NOT NULL, + `create_time` datetime DEFAULT NULL, + `update_time` datetime DEFAULT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `sensitive_username` (`sensitive_username`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; + +DROP TABLE IF EXISTS `linkis_ps_python_module_info`; +CREATE TABLE `linkis_ps_python_module_info` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '自增id', + `name` varchar(255) NOT NULL COMMENT 'python模块名称', + `description` text COMMENT 'python模块描述', + `path` varchar(255) NOT NULL COMMENT 'hdfs路径', + `engine_type` varchar(50) NOT NULL COMMENT '引擎类型,python/spark/all', + `create_user` varchar(50) NOT NULL COMMENT '创建用户', + `update_user` varchar(50) NOT NULL COMMENT '修改用户', + `is_load` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否加载,0-未加载,1-已加载', + `is_expire` tinyint(1) DEFAULT NULL COMMENT '是否过期,0-未过期,1-已过期)', + `create_time` datetime NOT NULL COMMENT '创建时间', + `update_time` datetime NOT NULL COMMENT '修改时间', + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; COMMENT='Python模块包信息表'; + +ALTER TABLE linkis_cg_manager_service_instance ADD COLUMN params text COLLATE utf8_bin DEFAULT NULL; + + diff --git a/linkis-dist/package/db/upgrade/1.6.0_schema/mysql/linkis_dml.sql b/linkis-dist/package/db/upgrade/1.6.0_schema/mysql/linkis_dml.sql index 0c9b591a27..c3d73821df 100644 --- a/linkis-dist/package/db/upgrade/1.6.0_schema/mysql/linkis_dml.sql +++ b/linkis-dist/package/db/upgrade/1.6.0_schema/mysql/linkis_dml.sql @@ -17,3 +17,90 @@ select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'doris'; UPDATE linkis_ps_dm_datasource_type_key SET `require` = 0 WHERE `key` ="password" and `data_source_type_id` = @data_source_type_id; +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01006','没有健康可用的ecm节点,可能任务量大,导致节点资源处于不健康状态,尝试kill空闲引擎释放资源','There are corresponding ECM tenant labels',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01007','文件编码格式异常,请联系管理人员处理','UnicodeEncodeError.*characters',0); +UPDATE linkis_ps_error_code SET error_regex = "KeyError: (.*)" WHERE error_code = "43017"; +UPDATE linkis_ps_error_code SET error_desc = 
"任务实际运行内存超过了设置的内存限制,请在管理台增加executor内存或在提交任务时通过spark.executor.memory增加内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13002"; +UPDATE linkis_ps_configuration_config_key SET validate_range ='[\",\",\"\\\\t\",\"\\\\;\",\"\\\\|\"]',description ="取值范围:,或\\t或;或|" WHERE `key`= "pipeline.field.split"; +DELETE FROM linkis_ps_error_code WHERE error_code = "43007"; +UPDATE linkis_ps_error_code SET error_regex='Permission denied:\\s*user=[a-zA-Z0-9_]+[,,]\\s*access=[a-zA-Z0-9_]+\\s*[,,]\\s*inode="([a-zA-Z0-9/_\\.]+)"' WHERE error_code = "22001"; +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13010','任务实际运行内存超过了设置的内存限制,请在管理台增加executor内存或在提交任务时通过spark.executor.memory增加内存','Container exited with a non-zero exit code',0); +UPDATE linkis_ps_configuration_config_key SET `key`="pipeline.output.isoverwrite" where `key` = "pipeline.output.isoverwtite"; +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43042','插入数据表动态分区数超过配置值 %s ,请优化sql或调整配置hive.exec.max.dynamic.partitions后重试','Maximum was set to (\\S+) partitions per node',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43043','执行任务消耗内存超过限制,hive任务请修改map或reduce的内存,spark任务请修改executor端内存','Error:java heap space',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43044','表 %s 分区数超过阈值 %s,需要分批删除分区,再删除表','the partitions of table (\\S+) exceeds threshold (\\S+)',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43045','查询/操作的表 %s 分区数为 %s ,超过阈值 %s ,需要限制查询/操作的分区数量','Number of partitions scanned \\(=(\\d+)\\) on table (\\S+) exceeds limit \\(=(\\d+)\\)',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43046','动态分区一次性写入分区数 %s ,超过阈值 %s,请减少一次性写入的分区数','Number of dynamic partitions created is (\\S+), which is more than (\\S+)',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43047','动态分区一次性写入分区数 %s ,超过阈值 %s,请减少一次性写入的分区数','Maximum was set to (\\S+) partitions per node, number of dynamic partitions on this node: (\\S+)',0); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`,`en_description`, `en_name`, `en_treeName`, `template_required`) VALUES ('mapreduce.job.reduce.slowstart.completedmaps', '取值范围:0-1', 'Map任务数与总Map任务数之间的比例','0.05', 'Regex', '^(0(\\.\\d{1,2})?|1(\\.0{1,2})?)$', '0', '0', '1', 'hive引擎设置', 'hive', 'Value Range: 0-1', 'The Ratio Between The Number Of Map Tasks And The Total Number Of Map Tasks', 'Hive Engine Settings', '1'); +insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) +(select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config +INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'hive' and config.`key` = "mapreduce.job.reduce.slowstart.completedmaps" and label_value = "*-*,hive-2.3.3"); +insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) +(select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation +INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id and 
relation.config_key_id = (select id FROM linkis_ps_configuration_config_key where `key`="mapreduce.job.reduce.slowstart.completedmaps")AND label.label_value = '*-*,hive-2.3.3'); +UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,请在管理台增加executor内存或在提交任务时通过spark.executor.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13002"; +UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,请在管理台增加executor内存或在提交任务时通过spark.executor.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13010"; +UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,请在管理台增加executor内存或调优sql后执行" WHERE error_code = "13003"; +UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致引擎意外退出,请在管理台增加executor内存或在提交任务时通过spark.executor.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13004"; +UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Spark app应用退出,请在管理台增加executor内存或在提交任务时通过spark.executor.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13005"; +UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Spark context应用退出,请在管理台增加executor内存或在提交任务时通过spark.executor.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13006"; +UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Pyspark子进程退出,请在管理台增加executor内存或在提交任务时通过spark.executor.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13007"; +UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Linkis服务负载过高,请在管理台调整executor内存或联系管理员扩容" WHERE error_code = "01002"; +UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Linkis服务负载过高,请在管理台调整executor内存或联系管理员扩容" WHERE error_code = "01003"; +-- add starrocks +INSERT INTO `linkis_ps_dm_datasource_type` (`name`, `description`, `option`, `classifier`, `icon`, `layers`, `description_en`, `option_en`, `classifier_en`) VALUES ('starrocks', 'starrocks数据库', 'starrocks', 'olap', '', 4, 'StarRocks Database', 'StarRocks', 'Olap'); +INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 'starrocks'),'host','主机名(Host)',NULL,'TEXT',NULL,1,'主机名(Host)',NULL,NULL,NULL,NULL,now(),now(),'Host','Host'); +INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 'starrocks'),'port','TCP端口号(Port)','9030','TEXT',NULL,1,'TCP端口号',NULL,NULL,NULL,NULL,now(),now(),'Tcp_Port','Tcp_Port'); +INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 'starrocks'),'driverClassName','驱动类名(Driver class name)','com.mysql.jdbc.Driver','TEXT',NULL,1,'驱动类名(Driver class name)','',NULL,NULL,NULL,'2024-05-23 18:28:07.0','2024-05-23 18:28:07.0','Driver class name','Driver class name'); +INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, 
`data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 'starrocks'),'username','用户名(Username)',NULL,'TEXT',NULL,1,'用户名(Username)','^[0-9A-Za-z_-]+$',NULL,NULL,NULL,now(),now(),'Username','Username'); +INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 'starrocks'),'password','密码(Password)',NULL,'PASSWORD',NULL,1,'密码(Password)','',NULL,NULL,NULL,now(),now(),'Password','Password'); +INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 'starrocks'),'databaseName','数据库名(Database name)',NULL,'TEXT',NULL,0,'数据库名(Database name)',NULL,NULL,NULL,NULL,now(),now(),'Database name','Database name'); +INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 'starrocks'),'params','连接参数(Connection params)',NULL,'TEXT',NULL,0,'输入JSON格式(Input JSON format): {"param":"value"}',NULL,NULL,NULL,NULL,now(),now(),'Connection params','Input JSON format: {"param":"value"}'); +INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 'starrocks'),'http_port','HTTP端口号(Port)','8030','TEXT',NULL,0,'HTTP端口号',NULL,NULL,NULL,NULL,now(),now(),'Http_Port','Http_Port'); +-- add userClientIP for tdsql +INSERT INTO linkis_ps_dm_datasource_type_key (data_source_type_id, `key`, name, default_value, value_type, `scope`, `require`, description, value_regex, ref_id, ref_value, data_source, update_time, create_time, name_en, description_en) VALUES(5, 'userClientIp', 'userClientIp', NULL, 'TEXT', 'ENV', 0, 'userClientIp', NULL, NULL, NULL, NULL, now(),now(), 'user client ip', 'user client ip'); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43019','执行表在元数据库中存在meta缓存,meta信息与缓存不一致导致,请增加参数(--conf spark.sql.hive.convertMetastoreOrc=false)后重试','Unable to alter table.*Table is not allowed to be altered',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13011','广播表过大导致driver内存溢出,请在执行sql前增加参数后重试:set spark.sql.autoBroadcastJoinThreshold=-1;','dataFrame to local exception',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43048','参数引用错误,请检查参数 %s 是否正常引用','UnboundLocalError.*local variable (\\S+) referenced before assignment',0); +UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Linkis服务负载过高,请在管理台调整Driver内存或联系管理员扩容" WHERE error_code = "01002"; +UPDATE linkis_ps_error_code SET error_desc = 
"任务运行内存超过设置内存限制,导致Linkis服务负载过高,请在管理台调整Driver内存或联系管理员扩容" WHERE error_code = "01003"; +UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Spark app应用退出,请在管理台增加Driver内存或在提交任务时通过spark.driver.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13005"; +UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Spark context应用退出,请在管理台增加Driver内存或在提交任务时通过spark.driver.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13006"; +UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Pyspark子进程退出,请在管理台增加Driver内存或在提交任务时通过spark.driver.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13007"; +UPDATE linkis_ps_error_code SET error_desc = "您的任务因为引擎退出(退出可能是引擎进程OOM或者主动kill引擎)导致失败" WHERE error_code = "13009"; +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13012','driver内存不足,请增加driver内存后重试','Failed to allocate a page (\\S+.*\\)), try again.',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13013','使用spark默认变量sc导致后续代码执行失败','sc.setJobGroup(\\S+.*\\))',0); +DELETE FROM linkis_ps_error_code WHERE error_code = "43016"; +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43016','模块 %s 没有属性 %s ,请确认代码引用是否正常','AttributeError: \'(\\S+)\' object has no attribute \'(\\S+)\'',0); +UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致引擎意外退出,请在管理台调整内存参数。" WHERE error_code = "13004"; +INSERT INTO linkis_cg_manager_label (label_key,label_value,label_feature,label_value_size,update_time,create_time) VALUES ('combined_userCreator_engineType','*-IDE,nebula-3.0.0','OPTIONAL',2,now(),now()); +INSERT INTO linkis_cg_manager_label (label_key,label_value,label_feature,label_value_size,update_time,create_time) VALUES ('combined_userCreator_engineType','*-*,nebula-3.0.0','OPTIONAL',2,now(),now()); +insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES ((select id from linkis_cg_manager_label where `label_value` = '*-IDE,nebula-3.0.0'), 2); +INSERT INTO linkis_ps_configuration_config_key (`key`,description,name,default_value,validate_type,validate_range,engine_conn_type,is_hidden,is_advanced,`level`,treeName,boundary_type,en_treeName,en_description,en_name,template_required) VALUES +('linkis.nebula.host','Nebula 连接地址','Nebula 连接地址',NULL,'None',NULL,'nebula',0,0,1,'Necula引擎设置',0,'Nebula Engine Settings','Nebula Host','Nebula Host',0); +INSERT INTO linkis_ps_configuration_config_key (`key`,description,name,default_value,validate_type,validate_range,engine_conn_type,is_hidden,is_advanced,`level`,treeName,boundary_type,en_treeName,en_description,en_name,template_required) VALUES +('linkis.nebula.port','Nebula 连接端口','Nebula 连接端口',NULL,'None',NULL,'nebula',0,0,1,'Necula引擎设置',0,'Nebula Engine Settings','Nebula Port','Nebula Port',0); +INSERT INTO linkis_ps_configuration_config_key (`key`,description,name,default_value,validate_type,validate_range,engine_conn_type,is_hidden,is_advanced,`level`,treeName,boundary_type,en_treeName,en_description,en_name,template_required) VALUES +('linkis.nebula.username','Nebula 连接用户名','Nebula 连接用户名',NULL,'None',NULL,'nebula',0,0,1,'Necula引擎设置',0,'Nebula Engine Settings','Nebula Username','Nebula Username',0); +INSERT INTO linkis_ps_configuration_config_key (`key`,description,name,default_value,validate_type,validate_range,engine_conn_type,is_hidden,is_advanced,`level`,treeName,boundary_type,en_treeName,en_description,en_name,template_required) VALUES +('linkis.nebula.password','Nebula 连接密码','Nebula 
连接密码',NULL,'None',NULL,'nebula',0,0,1,'Necula引擎设置',0,'Nebula Engine Settings','Nebula Password','Nebula Password',0); +insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) (select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'nebula' and config.`key` = 'linkis.nebula.host' and label_value = '*-*,nebula-3.0.0'); +insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) (select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'nebula' and config.`key` = 'linkis.nebula.port' and label_value = '*-*,nebula-3.0.0'); +insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) (select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'nebula' and config.`key` = 'linkis.nebula.username' and label_value = '*-*,nebula-3.0.0'); +insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) (select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'nebula' and config.`key` = 'linkis.nebula.password' and label_value = '*-*,nebula-3.0.0'); +insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) ( select `relation`.`config_key_id` AS `config_key_id`, '127.0.0.1' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id and relation.config_key_id = ( select id FROM linkis_ps_configuration_config_key where `key` = 'linkis.nebula.host') AND label.label_value = '*-*,nebula-3.0.0'); +insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) ( select `relation`.`config_key_id` AS `config_key_id`, '9669' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id and relation.config_key_id = ( select id FROM linkis_ps_configuration_config_key where `key` = 'linkis.nebula.port') AND label.label_value = '*-*,nebula-3.0.0'); +insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) ( select `relation`.`config_key_id` AS `config_key_id`, 'nebula' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id and relation.config_key_id = ( select id FROM linkis_ps_configuration_config_key where `key` = 'linkis.nebula.username') AND label.label_value = '*-*,nebula-3.0.0'); +insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) ( select `relation`.`config_key_id` AS `config_key_id`, 'nebula' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM 
linkis_ps_configuration_key_engine_relation relation INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id and relation.config_key_id = ( select id FROM linkis_ps_configuration_config_key where `key` = 'linkis.nebula.password') AND label.label_value = '*-*,nebula-3.0.0'); +INSERT INTO linkis_ps_configuration_config_key (`key`, description, name, default_value, validate_type, validate_range, engine_conn_type, is_hidden, is_advanced, `level`, treeName, boundary_type, en_treeName, en_description, en_name, template_required) VALUES ('linkis.nebula.space', 'Nebula 图空间', 'Nebula 图空间', NULL, 'None', NULL, 'nebula', 0, 0, 1, 'Necula引擎设置', 0, 'Nebula Engine Settings', 'Nebula Space', 'Nebula Space', 0); +insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) ( select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'nebula' and config.`key` = 'linkis.nebula.space' and label_value = '*-*,nebula-3.0.0'); +insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) ( select `relation`.`config_key_id` AS `config_key_id`, 'nebula' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id and relation.config_key_id = ( select id FROM linkis_ps_configuration_config_key where `key` = 'linkis.nebula.space') AND label.label_value = '*-*,nebula-3.0.0'); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43020','Python 进程已停止,查询失败!','python process has stopped',0); +UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,请在管理台增加executor内存或在提交任务时通过spark.executor.memory或spark.executor.memoryOverhead调整内存。" WHERE error_code = "13002"; +UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致引擎意外退出,请在管理台增加executor内存或在提交任务时通过spark.executor.memory或spark.executor.memoryOverhead调整内存" WHERE error_code = "13004"; +UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Pyspark子进程退出,请在管理台增加executor内存或在提交任务时通过spark.executor.memory或spark.executor.memoryOverhead调整内存" WHERE error_code = "13007"; +UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,请在管理台增加executor内存或在提交任务时通过spark.executor.memory或spark.executor.memoryOverhead调整内存" WHERE error_code = "13010"; +UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Spark app应用退出,请在管理台增加driver内存或在提交任务时通过spark.driver.memory调整内存" WHERE error_code = "13005"; +UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Spark context应用退出,请在管理台增加driver内存或在提交任务时通过spark.driver.memory调整内存" WHERE error_code = "13006"; +update linkis_ps_dm_datasource_type_key set name='Catalogs', description='Catalogs',name_en='Catalogs',description_en='Catalogs' where data_source_type_id in (select id from linkis_ps_dm_datasource_type where name = 'starrocks') and `key` = 'databaseName';
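-- The engine registrations above (nebula here, hive and trino earlier) all follow the same four-step
-- pattern: declare the combined userCreator/engineType label, declare the config key, link the key to
-- the label, then seed a default value. The following is an editorial sketch of that pattern for a
-- hypothetical engine type 'demo' with label '*-*,demo-1.0.0' and key 'linkis.demo.timeout'
-- (illustrative names only, not shipped by this patch):
insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType', '*-*,demo-1.0.0', 'OPTIONAL', 2, now(), now());
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`, `boundary_type`, `en_treeName`, `en_description`, `en_name`, `template_required`) VALUES ('linkis.demo.timeout', 'Demo task timeout, range 1-600 seconds', 'Demo task timeout', '60', 'NumInterval', '[1,600]', 'demo', 0, 0, 1, 'Demo engine settings', 3, 'Demo Engine Settings', 'Demo task timeout, range 1-600 seconds', 'Demo Task Timeout', 0);
insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`)
(select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config
INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'demo' and config.`key` = 'linkis.demo.timeout' and label.label_value = '*-*,demo-1.0.0');
insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`)
(select relation.config_key_id AS `config_key_id`, '60' AS `config_value`, relation.engine_type_label_id AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation
INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id and relation.config_key_id = (select id FROM linkis_ps_configuration_config_key where `key` = 'linkis.demo.timeout') AND label.label_value = '*-*,demo-1.0.0');
-- Joining on the label value instead of hard-coded ids keeps the sketch portable across environments
-- whose auto-increment ids differ, which is the same convention the statements above rely on.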