From 57ef58b1ac4a0cb12eb1868c2b9d022c520cc836 Mon Sep 17 00:00:00 2001 From: GSHF <18663587295@sohu.com> Date: Sat, 28 Dec 2024 21:40:44 +0800 Subject: [PATCH 1/5] Add Flink configuration support - Added taskManagerCount, taskManagerMemory, and jobManagerMemory to TEngineParameter - Added FlinkConfig component - Updated localization files --- .../Editor/components/FlinkConfig/index.tsx | 159 ++++++++++++++++++ .../MetricModal/ActuatorConfigure/index.tsx | 113 +++++++++++++ .../Editor/components/MetricModal/type.ts | 20 +++ datavines-ui/src/locale/en_US.ts | 23 +++ datavines-ui/src/locale/zh_CN.ts | 25 +++ 5 files changed, 340 insertions(+) create mode 100644 datavines-ui/Editor/components/FlinkConfig/index.tsx diff --git a/datavines-ui/Editor/components/FlinkConfig/index.tsx b/datavines-ui/Editor/components/FlinkConfig/index.tsx new file mode 100644 index 000000000..8ffb8e8e4 --- /dev/null +++ b/datavines-ui/Editor/components/FlinkConfig/index.tsx @@ -0,0 +1,159 @@ +import React, { useState } from 'react'; +import { Form, Input, Radio, InputNumber, Row, Col } from 'antd'; +import { useIntl } from 'react-intl'; + +// Flink部署模式选项 +const getFlinkDeployModes = (intl: any) => [ + { label: intl.formatMessage({ id: 'dv_flink_deploy_mode_local' }), value: 'local' }, + { label: intl.formatMessage({ id: 'dv_flink_deploy_mode_yarn_session' }), value: 'yarn-session' }, + { label: intl.formatMessage({ id: 'dv_flink_deploy_mode_yarn_per_job' }), value: 'yarn-per-job' }, + { label: intl.formatMessage({ id: 'dv_flink_deploy_mode_yarn_application' }), value: 'yarn-application' }, +]; + +interface FlinkConfig { + deployMode: string; + taskManagerCount: number; + taskManagerMemory: string; + jobManagerMemory: string; + parallelism: number; + jobName: string; + yarnQueue: string; + others: string; +} + +interface FlinkConfigProps { + onChange?: (config: any) => void; + initialValues?: any; + engineType?: string; +} + +const FlinkConfig: React.FC = ({ onChange, initialValues = {}, 
engineType = 'flink_single_table' }) => { + const intl = useIntl(); + const [form] = Form.useForm(); + const [deployMode, setDeployMode] = useState(initialValues.deployMode || 'local'); + + const handleValuesChange = (changedValues: any, allValues: any) => { + if (changedValues.deployMode) { + setDeployMode(changedValues.deployMode); + } + + // 构建配置对象 + const config = { + env: { + deployMode: allValues.deployMode, + taskManagerCount: allValues.taskManagerCount, + taskManagerMemory: allValues.taskManagerMemory + 'G', + jobManagerMemory: allValues.jobManagerMemory + 'G', + parallelism: allValues.parallelism, + jobName: allValues.jobName, + yarnQueue: allValues.yarnQueue, + others: allValues.others + } + }; + + onChange?.(config); + }; + + const deployModes = getFlinkDeployModes(intl); + + return ( +
+ + + {deployModes.map(mode => ( + + {mode.label} + + ))} + + + + {deployMode !== 'local' && ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + )} +
+ ); +}; + +export default FlinkConfig; diff --git a/datavines-ui/Editor/components/MetricModal/ActuatorConfigure/index.tsx b/datavines-ui/Editor/components/MetricModal/ActuatorConfigure/index.tsx index a99547db0..4bb8a2012 100644 --- a/datavines-ui/Editor/components/MetricModal/ActuatorConfigure/index.tsx +++ b/datavines-ui/Editor/components/MetricModal/ActuatorConfigure/index.tsx @@ -35,6 +35,16 @@ const Index = ({ form, detail }: InnerProps) => { parameter = JSON.parse(engineParameter); } form.setFieldsValue({ +<<<<<<< HEAD + deployMode: parameter.deployMode ?? 'local', + taskManagerCount: parameter.taskManagerCount ?? 2, + taskManagerMemory: parameter.taskManagerMemory ?? '2G', + jobManagerMemory: parameter.jobManagerMemory ?? '1G', + parallelism: parameter.parallelism ?? 1, + jobName: parameter.jobName ?? '', + yarnQueue: parameter.yarnQueue ?? '', + others: parameter.others ?? '--conf flink.yarn.maxAppAttempts=1', +======= deployMode: parameter.deployMode ?? 'cluster', driverCores: parameter.driverCores ?? 1, driverMemory: parameter.driverMemory ?? '512M', @@ -42,6 +52,7 @@ const Index = ({ form, detail }: InnerProps) => { executorMemory: parameter.executorMemory ?? '2G', executorCores: parameter.executorCores ?? 2, others: parameter.others ?? '--conf spark.yarn.maxAppAttempts=1', +>>>>>>> upstream/dev tenantCode: detail?.tenantCode ? detail.tenantCode.toString() : '', env: detail?.env ? detail.env.toString() : '', engineType: detail?.engineType ? 
detail.engineType.toString() : 'local', @@ -128,6 +139,98 @@ const Index = ({ form, detail }: InnerProps) => { ); +<<<<<<< HEAD + const renderFlink = () => ( + <> + + + {intl.formatMessage({ id: 'dv_flink_deploy_mode_local' })} + {intl.formatMessage({ id: 'dv_flink_deploy_mode_yarn_session' })} + {intl.formatMessage({ id: 'dv_flink_deploy_mode_yarn_per_job' })} + {intl.formatMessage({ id: 'dv_flink_deploy_mode_yarn_application' })} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ); +======= +>>>>>>> upstream/dev return ( <Row gutter={30}> @@ -150,10 +253,20 @@ const Index = ({ form, detail }: InnerProps) => { <Form.Item noStyle dependencies={['engineType']}> {() => { const value = form.getFieldValue('engineType'); +<<<<<<< HEAD + if (value === 'spark' || value === 'livy') { + return renderSpark(); + } + if (value === 'flink') { + return renderFlink(); + } + return null; +======= if (value !== 'spark' && value !== 'livy') { return null; } return renderSpark(); +>>>>>>> upstream/dev }} </Form.Item> diff --git a/datavines-ui/Editor/components/MetricModal/type.ts b/datavines-ui/Editor/components/MetricModal/type.ts index 805f85e95..9b01d18a5 100644 --- a/datavines-ui/Editor/components/MetricModal/type.ts +++ b/datavines-ui/Editor/components/MetricModal/type.ts @@ -43,6 +43,25 @@ export type TParameterItem = { } export type TEngineParameter = { +<<<<<<< HEAD + programType: string, // JAVA + deployMode: string, + driverCores: number, + driverMemory: string, + numExecutors: number, + executorMemory: string, + executorCores: number, + others: string, + parallelism?: number, + jobName?: string, + yarnQueue?: string, + tenantCode?: string, + env?: string, + engineType?: string, + taskManagerCount?: number, + taskManagerMemory?: string, + jobManagerMemory?: string, +======= programType:string, // JAVA deployMode:string, driverCores: number, @@ -51,6 +70,7 @@ export type TEngineParameter = { executorMemory:string, executorCores: number, others: 
string, +>>>>>>> upstream/dev } export type TDetail = null | { diff --git a/datavines-ui/src/locale/en_US.ts b/datavines-ui/src/locale/en_US.ts index 8b2da650d..51b462438 100644 --- a/datavines-ui/src/locale/en_US.ts +++ b/datavines-ui/src/locale/en_US.ts @@ -292,6 +292,10 @@ export default { job_log_refresh: 'Refresh', job_log_download: 'Download', job_log_fullScreen: 'FullScreen', +<<<<<<< HEAD + dv_task_manager_count: 'Task Manager Count', +======= +>>>>>>> upstream/dev error_create_btn: 'Create error data store', error_table_store_name: 'Storage Name', @@ -329,4 +333,23 @@ export default { next_ten_cron_run_times: 'Next ten cron run times', view_future_execute_plan: 'view future execute plan', test_send: 'test send', +<<<<<<< HEAD + + dv_deploy_mode: 'Deploy Mode', + dv_deploy_mode_required: 'Please select deploy mode', + dv_flink_home: 'Flink Home Path', + dv_flink_home_required: 'Please enter Flink home path', + dv_jobmanager_memory: 'JobManager Memory (MB)', + dv_jobmanager_memory_required: 'Please enter JobManager memory size', + dv_taskmanager_memory: 'TaskManager Memory (MB)', + dv_taskmanager_memory_required: 'Please enter TaskManager memory size', + dv_flink_deploy_mode_local: 'Local Mode', + dv_flink_deploy_mode_yarn_session: 'Yarn Session Mode', + dv_flink_deploy_mode_yarn_per_job: 'Yarn Per-Job Mode', + dv_flink_deploy_mode_yarn_application: 'Yarn Application Mode', + dv_deploy_mode_cluster: 'Cluster Mode', + dv_deploy_mode_yarn: 'Yarn Mode', + dv_deploy_mode_local: 'Local Mode', +======= +>>>>>>> upstream/dev }; diff --git a/datavines-ui/src/locale/zh_CN.ts b/datavines-ui/src/locale/zh_CN.ts index 4b9f06a69..1a3da7e81 100644 --- a/datavines-ui/src/locale/zh_CN.ts +++ b/datavines-ui/src/locale/zh_CN.ts @@ -297,6 +297,12 @@ export default { error_title: '存储管理', user_title: '用户管理', +<<<<<<< HEAD + + dv_task_manager_count: 'Task Manager数量', + +======= +>>>>>>> upstream/dev label_title: '标签分类', label_list: '标签列表', label_add_category: '新增标签分类', @@ -327,4 
+333,23 @@ export default { next_ten_cron_run_times: '未来十次执行时间', view_future_execute_plan: '查看未来执行计划', test_send: '测试发送', +<<<<<<< HEAD + + dv_deploy_mode: '部署模式', + dv_deploy_mode_required: '请选择部署模式', + dv_flink_home: 'Flink安装路径', + dv_flink_home_required: '请输入Flink安装路径', + dv_jobmanager_memory: 'JobManager内存 (MB)', + dv_jobmanager_memory_required: '请输入JobManager内存大小', + dv_taskmanager_memory: 'TaskManager内存 (MB)', + dv_taskmanager_memory_required: '请输入TaskManager内存大小', + dv_flink_deploy_mode_local: '本地模式', + dv_flink_deploy_mode_yarn_session: 'Yarn Session模式', + dv_flink_deploy_mode_yarn_per_job: 'Yarn Per-Job模式', + dv_flink_deploy_mode_yarn_application: 'Yarn Application模式', + dv_deploy_mode_cluster: '集群模式', + dv_deploy_mode_yarn: 'Yarn模式', + dv_deploy_mode_local: '本地模式', +======= +>>>>>>> upstream/dev }; From b9d8dadf6f348b3dbcb6189d99db6d9e1a384ccf Mon Sep 17 00:00:00 2001 From: GSHF <18663587295@sohu.com> Date: Sat, 28 Dec 2024 21:44:04 +0800 Subject: [PATCH 2/5] Add Flink engine implementation - Added Flink engine implementation - Updated pom.xml to include Flink engine module --- .../datavines-engine-flink-api/pom.xml | 59 ++++ .../flink/api/FlinkRuntimeEnvironment.java | 89 +++++ .../api/stream/FlinkStreamExecution.java | 101 ++++++ .../flink/api/stream/FlinkStreamSink.java | 30 ++ .../flink/api/stream/FlinkStreamSource.java | 40 +++ .../api/stream/FlinkStreamTransform.java | 40 +++ ...atavines.engine.api.env.RuntimeEnvironment | 1 + .../datavines-engine-flink-core/pom.xml | 69 ++++ .../config/BaseFlinkConfigurationBuilder.java | 227 +++++++++++++ .../flink/config/FlinkEngineConfig.java | 87 +++++ .../FlinkSingleTableConfigurationBuilder.java | 140 ++++++++ .../flink/config/FlinkSinkSqlBuilder.java | 62 ++++ .../flink/core/FlinkDataVinesBootstrap.java | 33 ++ .../engine/flink/sink/FlinkJdbcSink.java | 160 +++++++++ .../engine/flink/source/FlinkJdbcSource.java | 160 +++++++++ .../flink/transform/FlinkSqlTransform.java | 102 ++++++ 
...datavines.engine.api.engine.EngineExecutor | 1 + ...ines.engine.config.JobConfigurationBuilder | 1 + ...ines.engine.config.JobConfigurationBuilder | 1 + .../datavines-engine-flink-executor/pom.xml | 59 ++++ .../flink/executor/FlinkCommandProcess.java | 148 +++++++++ .../flink/executor/FlinkEngineExecutor.java | 308 ++++++++++++++++++ .../executor/parameter/FlinkArgsUtils.java | 79 +++++ .../executor/parameter/FlinkParameters.java | 84 +++++ .../flink/executor/utils/FlinkArgsUtils.java | 71 ++++ .../flink/executor/utils/FlinkParameters.java | 83 +++++ ...datavines.engine.api.engine.EngineExecutor | 1 + .../datavines-engine-flink/pom.xml | 111 +++++++ .../datavines-engine-plugins/pom.xml | 9 +- 29 files changed, 2352 insertions(+), 4 deletions(-) create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/pom.xml create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/java/io/datavines/engine/flink/api/FlinkRuntimeEnvironment.java create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/java/io/datavines/engine/flink/api/stream/FlinkStreamExecution.java create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/java/io/datavines/engine/flink/api/stream/FlinkStreamSink.java create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/java/io/datavines/engine/flink/api/stream/FlinkStreamSource.java create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/java/io/datavines/engine/flink/api/stream/FlinkStreamTransform.java create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/resources/META-INF/plugins/io.datavines.engine.api.env.RuntimeEnvironment 
create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/pom.xml create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/config/BaseFlinkConfigurationBuilder.java create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/config/FlinkEngineConfig.java create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/config/FlinkSingleTableConfigurationBuilder.java create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/config/FlinkSinkSqlBuilder.java create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/core/FlinkDataVinesBootstrap.java create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/sink/FlinkJdbcSink.java create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/source/FlinkJdbcSource.java create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/transform/FlinkSqlTransform.java create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/resources/META-INF/plugins/io.datavines.engine.api.engine.EngineExecutor create mode 100644 
datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/resources/META-INF/plugins/io.datavines.engine.config.JobConfigurationBuilder create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/resources/META-INF/services/io.datavines.engine.config.JobConfigurationBuilder create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/pom.xml create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/java/io/datavines/engine/flink/executor/FlinkCommandProcess.java create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/java/io/datavines/engine/flink/executor/FlinkEngineExecutor.java create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/java/io/datavines/engine/flink/executor/parameter/FlinkArgsUtils.java create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/java/io/datavines/engine/flink/executor/parameter/FlinkParameters.java create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/java/io/datavines/engine/flink/executor/utils/FlinkArgsUtils.java create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/java/io/datavines/engine/flink/executor/utils/FlinkParameters.java create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/resources/META-INF/plugins/io.datavines.engine.api.engine.EngineExecutor create mode 100644 datavines-engine/datavines-engine-plugins/datavines-engine-flink/pom.xml diff --git 
a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/pom.xml b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/pom.xml new file mode 100644 index 000000000..0864ea39c --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/pom.xml @@ -0,0 +1,59 @@ +<?xml version="1.0" encoding="UTF-8"?> +<!-- + + Licensed to the Apache Software Foundation (ASF) under one or more + contributor license agreements. See the NOTICE file distributed with + this work for additional information regarding copyright ownership. + The ASF licenses this file to You under the Apache License, Version 2.0 + (the "License"); you may not use this file except in compliance with + the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +--> +<project xmlns="http://maven.apache.org/POM/4.0.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <parent> + <artifactId>datavines-engine-flink</artifactId> + <groupId>io.datavines</groupId> + <version>1.0.0-SNAPSHOT</version> + </parent> + <modelVersion>4.0.0</modelVersion> + + <artifactId>datavines-engine-flink-api</artifactId> + + <dependencies> + <dependency> + <groupId>io.datavines</groupId> + <artifactId>datavines-engine-api</artifactId> + <version>${project.version}</version> + </dependency> + + <dependency> + <groupId>io.datavines</groupId> + <artifactId>datavines-common</artifactId> + <version>${project.version}</version> + </dependency> + + <dependency> + <groupId>org.apache.flink</groupId> + <artifactId>flink-streaming-java_${scala.binary.version}</artifactId> + <version>${flink.version}</version> + <scope>provided</scope> + </dependency> + + <dependency> + <groupId>org.apache.flink</groupId> + <artifactId>flink-table-api-java-bridge_${scala.binary.version}</artifactId> + <version>${flink.version}</version> + <scope>provided</scope> + </dependency> + </dependencies> +</project> diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/java/io/datavines/engine/flink/api/FlinkRuntimeEnvironment.java b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/java/io/datavines/engine/flink/api/FlinkRuntimeEnvironment.java new file mode 100644 index 000000000..83a04a72c --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/java/io/datavines/engine/flink/api/FlinkRuntimeEnvironment.java @@ -0,0 +1,89 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.datavines.engine.flink.api; + +import io.datavines.common.config.CheckResult; +import io.datavines.common.config.Config; +import io.datavines.common.exception.DataVinesException; +import io.datavines.engine.api.env.Execution; +import io.datavines.engine.api.env.RuntimeEnvironment; +import io.datavines.engine.flink.api.stream.FlinkStreamExecution; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; + +public class FlinkRuntimeEnvironment implements RuntimeEnvironment { + + private StreamExecutionEnvironment env; + private StreamTableEnvironment tableEnv; + private Config config; + private FlinkStreamExecution execution; + + public FlinkRuntimeEnvironment() { + this.config = new Config(); + this.execution = new FlinkStreamExecution(this); + } + + public void setConfig(Config config) { + if (config != null) { + this.config = config; + } + } + + public Config getConfig() { + return config; + } + + public CheckResult checkConfig() { + return new CheckResult(true, "Configuration check passed"); + } + + public Execution getExecution() { + return execution; + } + + public void prepare() { + try { + env = StreamExecutionEnvironment.getExecutionEnvironment(); + tableEnv = 
StreamTableEnvironment.create(env); + } catch (Exception e) { + throw new DataVinesException("Failed to prepare Flink environment", e); + } + } + + public void stop() { + try { + if (env != null) { + // Flink's environment doesn't have a direct cancel method, + // we need to handle job cancellation through JobClient + } + } catch (Exception e) { + throw new DataVinesException("Failed to stop Flink environment", e); + } + } + + public String getType() { + return "flink"; + } + + public StreamExecutionEnvironment getEnv() { + return env; + } + + public StreamTableEnvironment getTableEnv() { + return tableEnv; + } +} diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/java/io/datavines/engine/flink/api/stream/FlinkStreamExecution.java b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/java/io/datavines/engine/flink/api/stream/FlinkStreamExecution.java new file mode 100644 index 000000000..e877d7124 --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/java/io/datavines/engine/flink/api/stream/FlinkStreamExecution.java @@ -0,0 +1,101 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.datavines.engine.flink.api.stream; + +import io.datavines.common.config.CheckResult; +import io.datavines.common.config.Config; +import io.datavines.engine.api.component.Component; +import io.datavines.engine.api.env.Execution; +import io.datavines.engine.api.plugin.Plugin; +import io.datavines.engine.flink.api.FlinkRuntimeEnvironment; +import org.apache.flink.streaming.api.datastream.DataStream; +import org.apache.flink.table.api.Table; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; +import org.apache.flink.types.Row; + +import java.util.List; + +public class FlinkStreamExecution implements Execution<FlinkStreamSource, FlinkStreamTransform, FlinkStreamSink>, Plugin { + + private final FlinkRuntimeEnvironment flinkEnv; + private Config config; + + public FlinkStreamExecution(FlinkRuntimeEnvironment flinkEnv) { + this.flinkEnv = flinkEnv; + this.config = new Config(); + } + + @Override + public void setConfig(Config config) { + if (config != null) { + this.config = config; + } + } + + @Override + public Config getConfig() { + return config; + } + + @Override + public CheckResult checkConfig() { + return new CheckResult(true, "Configuration check passed"); + } + + public String getType() { + return "flink_stream"; + } + + @Override + public void prepare() throws Exception { + // Initialization if needed + } + + @Override + public void execute(List<FlinkStreamSource> sources, List<FlinkStreamTransform> transforms, List<FlinkStreamSink> sinks) throws Exception { + for (FlinkStreamSource source : sources) { + DataStream<Row> sourceStream = source.getData(flinkEnv); + createTemporaryView(source.getClass().getSimpleName(), sourceStream, source.getFieldNames()); + + DataStream<Row> transformedStream = sourceStream; + for (FlinkStreamTransform transform : transforms) { + transformedStream = transform.process(transformedStream, 
flinkEnv); + createTemporaryView(transform.getClass().getSimpleName(), transformedStream, transform.getOutputFieldNames()); + } + + for (FlinkStreamSink sink : sinks) { + sink.output(transformedStream, flinkEnv); + } + } + + flinkEnv.getEnv().execute(); + } + + @Override + public void stop() throws Exception { + // Flink's execution doesn't need explicit stopping + } + + private void createTemporaryView(String tableName, DataStream<Row> dataStream, String[] fieldNames) { + StreamTableEnvironment tableEnv = flinkEnv.getTableEnv(); + Table table = tableEnv.fromDataStream(dataStream); + for (int i = 0; i < fieldNames.length; i++) { + table = table.as(fieldNames[i]); + } + tableEnv.createTemporaryView(tableName, table); + } +} diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/java/io/datavines/engine/flink/api/stream/FlinkStreamSink.java b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/java/io/datavines/engine/flink/api/stream/FlinkStreamSink.java new file mode 100644 index 000000000..df59f4f0d --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/java/io/datavines/engine/flink/api/stream/FlinkStreamSink.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.datavines.engine.flink.api.stream; + +import io.datavines.engine.api.component.Component; +import org.apache.flink.streaming.api.datastream.DataStream; +import org.apache.flink.types.Row; +import io.datavines.engine.flink.api.FlinkRuntimeEnvironment; + +public interface FlinkStreamSink extends Component { + + /** + * 输出数据流 + */ + void output(DataStream<Row> dataStream, FlinkRuntimeEnvironment environment); +} diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/java/io/datavines/engine/flink/api/stream/FlinkStreamSource.java b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/java/io/datavines/engine/flink/api/stream/FlinkStreamSource.java new file mode 100644 index 000000000..11610938d --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/java/io/datavines/engine/flink/api/stream/FlinkStreamSource.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.datavines.engine.flink.api.stream; + +import io.datavines.engine.api.component.Component; +import org.apache.flink.streaming.api.datastream.DataStream; +import org.apache.flink.types.Row; +import io.datavines.engine.flink.api.FlinkRuntimeEnvironment; + +public interface FlinkStreamSource extends Component { + + /** + * 获取数据流 + */ + DataStream<Row> getData(FlinkRuntimeEnvironment environment); + + /** + * 获取数据Schema + */ + String[] getFieldNames(); + + /** + * 获取数据类型 + */ + Class<?>[] getFieldTypes(); +} diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/java/io/datavines/engine/flink/api/stream/FlinkStreamTransform.java b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/java/io/datavines/engine/flink/api/stream/FlinkStreamTransform.java new file mode 100644 index 000000000..804ba4a34 --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/java/io/datavines/engine/flink/api/stream/FlinkStreamTransform.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.datavines.engine.flink.api.stream; + +import io.datavines.engine.api.component.Component; +import org.apache.flink.streaming.api.datastream.DataStream; +import org.apache.flink.types.Row; +import io.datavines.engine.flink.api.FlinkRuntimeEnvironment; + +public interface FlinkStreamTransform extends Component { + + /** + * 处理数据流 + */ + DataStream<Row> process(DataStream<Row> dataStream, FlinkRuntimeEnvironment environment); + + /** + * 获取输出Schema + */ + String[] getOutputFieldNames(); + + /** + * 获取输出数据类型 + */ + Class<?>[] getOutputFieldTypes(); +} diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/resources/META-INF/plugins/io.datavines.engine.api.env.RuntimeEnvironment b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/resources/META-INF/plugins/io.datavines.engine.api.env.RuntimeEnvironment new file mode 100644 index 000000000..298054c2f --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-api/src/main/resources/META-INF/plugins/io.datavines.engine.api.env.RuntimeEnvironment @@ -0,0 +1 @@ +flink=io.datavines.engine.flink.api.FlinkRuntimeEnvironment diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/pom.xml b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/pom.xml new file mode 100644 index 000000000..23464d5b2 --- /dev/null +++ 
b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/pom.xml @@ -0,0 +1,69 @@ +<?xml version="1.0" encoding="UTF-8"?> +<!-- + + Licensed to the Apache Software Foundation (ASF) under one or more + contributor license agreements. See the NOTICE file distributed with + this work for additional information regarding copyright ownership. + The ASF licenses this file to You under the Apache License, Version 2.0 + (the "License"); you may not use this file except in compliance with + the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +--> +<project xmlns="http://maven.apache.org/POM/4.0.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <parent> + <artifactId>datavines-engine-flink</artifactId> + <groupId>io.datavines</groupId> + <version>1.0.0-SNAPSHOT</version> + </parent> + <modelVersion>4.0.0</modelVersion> + + <artifactId>datavines-engine-flink-core</artifactId> + + <dependencies> + <dependency> + <groupId>io.datavines</groupId> + <artifactId>datavines-engine-flink-api</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>io.datavines</groupId> + <artifactId>datavines-engine-core</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>org.apache.flink</groupId> + <artifactId>flink-connector-jdbc_${scala.binary.version}</artifactId> + <version>${flink.version}</version> + </dependency> + <dependency> + <groupId>io.datavines</groupId> + 
<artifactId>datavines-engine-common</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>io.datavines</groupId> + <artifactId>datavines-connector-api</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>io.datavines</groupId> + <artifactId>datavines-engine-config</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>io.datavines</groupId> + <artifactId>datavines-common</artifactId> + <version>${project.version}</version> + </dependency> + </dependencies> +</project> diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/config/BaseFlinkConfigurationBuilder.java b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/config/BaseFlinkConfigurationBuilder.java new file mode 100644 index 000000000..f6e7df009 --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/config/BaseFlinkConfigurationBuilder.java @@ -0,0 +1,227 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.datavines.engine.flink.config; + +import io.datavines.common.config.EnvConfig; +import io.datavines.common.config.SinkConfig; +import io.datavines.common.config.SourceConfig; +import io.datavines.common.config.enums.SinkType; +import io.datavines.common.config.enums.SourceType; +import io.datavines.common.entity.ConnectorParameter; +import io.datavines.common.entity.job.BaseJobParameter; +import io.datavines.common.exception.DataVinesException; +import io.datavines.common.utils.JSONUtils; +import io.datavines.common.utils.StringUtils; +import io.datavines.engine.common.utils.ParserUtils; +import io.datavines.engine.config.BaseJobConfigurationBuilder; +import io.datavines.connector.api.ConnectorFactory; +import io.datavines.spi.PluginLoader; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.collections4.CollectionUtils; + +import java.util.*; +import java.util.stream.Collectors; + +import static io.datavines.common.CommonConstants.*; +import static io.datavines.common.ConfigConstants.*; +import static io.datavines.common.ConfigConstants.TABLE; + +/** + * + * + * @author dataVines + * @since 2021-07-01 + */ +@Slf4j +public abstract class BaseFlinkConfigurationBuilder extends BaseJobConfigurationBuilder { + + @Override + protected EnvConfig getEnvConfig() { + EnvConfig envConfig = new EnvConfig(); + envConfig.setEngine(jobExecutionInfo.getEngineType()); + Map<String,Object> configMap = envConfig.getConfig(); + if (configMap == null) { + configMap = new HashMap<>(); + } + + ConnectorParameter connectorParameter = jobExecutionParameter.getConnectorParameter(); + String srcConnectorType = ""; + if (connectorParameter != null) { + srcConnectorType = connectorParameter.getType(); + } + + ConnectorParameter connectorParameter2 = jobExecutionParameter.getConnectorParameter2(); + String srcConnectorType2 = ""; + if (connectorParameter2 != 
null) { + srcConnectorType2 = connectorParameter2.getType(); + } + + envConfig.setConfig(configMap); + return envConfig; + } + + @Override + protected List<SourceConfig> getSourceConfigs() throws DataVinesException { + List<SourceConfig> sourceConfigs = new ArrayList<>(); + List<BaseJobParameter> metricJobParameterList = jobExecutionParameter.getMetricParameterList(); + boolean isAddValidateResultDataSource = false; + if (CollectionUtils.isNotEmpty(metricJobParameterList)) { + Set<String> sourceConnectorSet = new HashSet<>(); + Set<String> targetConnectorSet = new HashSet<>(); + for (BaseJobParameter parameter : metricJobParameterList) { + String metricUniqueKey = getMetricUniqueKey(parameter); + Map<String, String> metricInputParameter = metric2InputParameter.get(metricUniqueKey); + if (jobExecutionParameter.getConnectorParameter() != null) { + ConnectorParameter connectorParameter = jobExecutionParameter.getConnectorParameter(); + SourceConfig sourceConfig = new SourceConfig(); + + Map<String, Object> connectorParameterMap = new HashMap<>(connectorParameter.getParameters()); + connectorParameterMap.putAll(metricInputParameter); + + if (connectorParameter.getParameters().get(SCHEMA) != null) { + metricInputParameter.put(SCHEMA, (String)connectorParameter.getParameters().get(SCHEMA)); + } + + metricInputParameter.put(DATABASE_NAME, metricInputParameter.get(DATABASE)); + metricInputParameter.put(TABLE_NAME, metricInputParameter.get(TABLE)); + metricInputParameter.put(COLUMN_NAME, metricInputParameter.get(COLUMN)); + + ConnectorFactory connectorFactory = PluginLoader + .getPluginLoader(ConnectorFactory.class) + .getNewPlugin(connectorParameter.getType()); + + connectorParameterMap.put(TABLE, metricInputParameter.get(TABLE)); + connectorParameterMap.put(DATABASE, metricInputParameter.get(DATABASE)); + connectorParameterMap = connectorFactory.getConnectorParameterConverter().converter(connectorParameterMap); + connectorParameterMap.put(PASSWORD, 
ParserUtils.encode((String)connectorParameterMap.get(PASSWORD))); + + String outputTable = getOutputTable(metricInputParameter.get(DATABASE), metricInputParameter.get(SCHEMA), metricInputParameter.get(TABLE)); + String tableAlias = getTableAlias(metricInputParameter.get(DATABASE), metricInputParameter.get(SCHEMA), metricInputParameter.get(TABLE), "1"); + connectorParameterMap.put(OUTPUT_TABLE, outputTable); + connectorParameterMap.put(DRIVER, connectorFactory.getDialect().getDriver()); + + metricInputParameter.put(TABLE, outputTable); + metricInputParameter.put(TABLE_ALIAS, tableAlias); + metricInputParameter.put(COLUMN, metricInputParameter.get(COLUMN)); + metricInputParameter.put(REGEX_KEY, "REGEXP(${column}, ${regex})"); + metricInputParameter.put(NOT_REGEX_KEY, "NOT REGEXP(${column}, ${regex})"); + metricInputParameter.put(STRING_TYPE, "STRING"); + metricInputParameter.put(IF_FUNCTION_KEY, "IF"); + metricInputParameter.put(LIMIT_TOP_50_KEY, " LIMIT 50"); + metricInputParameter.put(LENGTH_KEY, "CHARACTER_LENGTH(${column})"); + metricInputParameter.put(SRC_CONNECTOR_TYPE, connectorParameter.getType()); + metricInputParameter.put(ENGINE_TYPE, jobExecutionInfo.getEngineType()); + + String connectorUUID = connectorFactory.getConnectorParameterConverter().getConnectorUUID(connectorParameterMap); + + if (sourceConnectorSet.contains(connectorUUID)) { + continue; + } + + sourceConfig.setPlugin(connectorFactory.getCategory()); + sourceConfig.setConfig(connectorParameterMap); + sourceConfig.setType(SourceType.SOURCE.getDescription()); + sourceConfigs.add(sourceConfig); + sourceConnectorSet.add(connectorUUID); + } + + if (jobExecutionParameter.getConnectorParameter2() != null + && jobExecutionParameter.getConnectorParameter2().getParameters() != null) { + ConnectorParameter connectorParameter2 = jobExecutionParameter.getConnectorParameter2(); + SourceConfig sourceConfig = new SourceConfig(); + + Map<String, Object> connectorParameterMap = new 
HashMap<>(connectorParameter2.getParameters()); + connectorParameterMap.putAll(metricInputParameter); + + if (connectorParameter2.getParameters().get(SCHEMA) != null) { + metricInputParameter.put(SCHEMA2, (String)connectorParameter2.getParameters().get(SCHEMA)); + } + + ConnectorFactory connectorFactory = PluginLoader + .getPluginLoader(ConnectorFactory.class) + .getNewPlugin(connectorParameter2.getType()); + + connectorParameterMap.put(TABLE, metricInputParameter.get(TABLE2)); + connectorParameterMap.put(DATABASE, metricInputParameter.get(DATABASE2)); + connectorParameterMap = connectorFactory.getConnectorParameterConverter().converter(connectorParameterMap); + connectorParameterMap.put(PASSWORD, ParserUtils.encode((String)connectorParameterMap.get(PASSWORD))); + + String outputTable = getOutputTable(metricInputParameter.get(DATABASE2), + metricInputParameter.get(SCHEMA2), + metricInputParameter.get(TABLE2)) + "_2"; + + String tableAlias = getTableAlias(metricInputParameter.get(DATABASE2), + metricInputParameter.get(SCHEMA2), + metricInputParameter.get(TABLE2), "2"); + + connectorParameterMap.put(OUTPUT_TABLE, outputTable); + connectorParameterMap.put(DRIVER, connectorFactory.getDialect().getDriver()); + + metricInputParameter.put(TABLE2, outputTable); + metricInputParameter.put(TABLE2_ALIAS, tableAlias); + + String connectorUUID = connectorFactory.getConnectorParameterConverter().getConnectorUUID(connectorParameterMap); + + if (targetConnectorSet.contains(connectorUUID)) { + continue; + } + + sourceConfig.setPlugin(connectorFactory.getCategory()); + sourceConfig.setConfig(connectorParameterMap); + sourceConfig.setType(SourceType.SOURCE.getDescription()); + sourceConfigs.add(sourceConfig); + targetConnectorSet.add(connectorUUID); + } + + metric2InputParameter.put(metricUniqueKey, metricInputParameter); + } + } + + return sourceConfigs; + } + + protected String getOutputTable(String database, String schema, String table) { + if (StringUtils.isNotEmpty(schema)) { + 
return String.format("%s_%s_%s", database, schema, table); + } + return String.format("%s_%s", database, table); + } + + protected String getTableAlias(String database, String schema, String table, String order) { + if (StringUtils.isNotEmpty(schema)) { + return String.format("t%s_%s_%s_%s", order, database, schema, table); + } + return String.format("t%s_%s_%s", order, database, table); + } + + protected SinkConfig getErrorSinkConfig(Map<String, String> inputParameter) { + if (FILE.equalsIgnoreCase(jobExecutionInfo.getErrorDataStorageType())) { + SinkConfig sinkConfig = new SinkConfig(); + Map<String, Object> configMap = new HashMap<>(); + Map<String,String> errorDataParameterMap = JSONUtils.toMap(jobExecutionInfo.getErrorDataStorageParameter(),String.class, String.class); + configMap.put(DATA_DIR, errorDataParameterMap.get(DATA_DIR)); + configMap.put(FILE_NAME, inputParameter.get(ERROR_DATA_FILE_NAME)); + configMap.put(COLUMN_SEPARATOR, errorDataParameterMap.get(COLUMN_SEPARATOR)); + configMap.put(LINE_SEPARATOR, errorDataParameterMap.get(LINE_SEPARATOR)); + sinkConfig.setConfig(configMap); + sinkConfig.setType(SinkType.ERROR_DATA.getDescription()); + sinkConfig.setPlugin(FILE); + return sinkConfig; + } + return null; + } +} diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/config/FlinkEngineConfig.java b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/config/FlinkEngineConfig.java new file mode 100644 index 000000000..d4e47eb33 --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/config/FlinkEngineConfig.java @@ -0,0 +1,87 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.datavines.engine.flink.config; + +import io.datavines.common.config.Config; +import io.datavines.common.config.CheckResult; +import io.datavines.engine.api.plugin.Plugin; +import org.apache.flink.api.common.RuntimeExecutionMode; + +import java.io.Serializable; + +public class FlinkEngineConfig implements Plugin, Serializable { + + private static final long serialVersionUID = 1L; + + private static final String CHECKPOINT_INTERVAL = "flink.checkpoint.interval"; + private static final String PARALLELISM = "flink.parallelism"; + private static final String RESTART_ATTEMPTS = "flink.restart.attempts"; + private static final String RESTART_DELAY = "flink.restart.delay"; + private static final String STATE_BACKEND = "flink.state.backend"; + private static final String CHECKPOINT_PATH = "flink.checkpoint.path"; + private static final String EXECUTION_MODE = "flink.execution.mode"; + + private Config config; + + public FlinkEngineConfig() { + this.config = new Config(); + } + + @Override + public void setConfig(Config config) { + this.config = config != null ? 
config : new Config(); + } + + @Override + public Config getConfig() { + return config; + } + + @Override + public CheckResult checkConfig() { + return new CheckResult(true, ""); + } + + public long getCheckpointInterval() { + return config.getLong(CHECKPOINT_INTERVAL, 10000L); + } + + public int getParallelism() { + return config.getInt(PARALLELISM, 1); + } + + public int getRestartAttempts() { + return config.getInt(RESTART_ATTEMPTS, 3); + } + + public long getRestartDelay() { + return config.getLong(RESTART_DELAY, 10000L); + } + + public String getStateBackend() { + return config.getString(STATE_BACKEND, "memory"); + } + + public String getCheckpointPath() { + return config.getString(CHECKPOINT_PATH, ""); + } + + public RuntimeExecutionMode getExecutionMode() { + String mode = config.getString(EXECUTION_MODE, "STREAMING"); + return RuntimeExecutionMode.valueOf(mode.toUpperCase()); + } +} diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/config/FlinkSingleTableConfigurationBuilder.java b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/config/FlinkSingleTableConfigurationBuilder.java new file mode 100644 index 000000000..c839d1a9e --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/config/FlinkSingleTableConfigurationBuilder.java @@ -0,0 +1,140 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.datavines.engine.flink.config; + +import io.datavines.common.config.EnvConfig; +import io.datavines.common.config.SinkConfig; +import io.datavines.common.config.SourceConfig; +import io.datavines.common.config.enums.SinkType; +import io.datavines.common.entity.job.BaseJobParameter; +import io.datavines.common.exception.DataVinesException; +import io.datavines.common.utils.StringUtils; +import io.datavines.engine.config.MetricParserUtils; +import io.datavines.metric.api.ExpectedValue; +import io.datavines.spi.PluginLoader; +import org.apache.commons.collections4.CollectionUtils; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import static io.datavines.common.ConfigConstants.*; + +public class FlinkSingleTableConfigurationBuilder extends BaseFlinkConfigurationBuilder { + + + @Override + public void buildEnvConfig() { + EnvConfig envConfig = new EnvConfig(); + envConfig.setEngine("flink"); + configuration.setEnvConfig(envConfig); + } + + @Override + public void buildSinkConfigs() throws DataVinesException { + List<SinkConfig> sinkConfigs = new ArrayList<>(); + + List<BaseJobParameter> metricJobParameterList = jobExecutionParameter.getMetricParameterList(); + if (CollectionUtils.isNotEmpty(metricJobParameterList)) { + for (BaseJobParameter parameter : metricJobParameterList) { + String metricUniqueKey = getMetricUniqueKey(parameter); + Map<String, String> metricInputParameter = metric2InputParameter.get(metricUniqueKey); + if (metricInputParameter == null) { + continue; + } + + // 确保必要的参数存在 + if 
(!metricInputParameter.containsKey(METRIC_NAME) && parameter.getMetricType() != null) { + metricInputParameter.put(METRIC_NAME, parameter.getMetricType()); + } + + metricInputParameter.put(METRIC_UNIQUE_KEY, metricUniqueKey); + String expectedType = "local_" + parameter.getExpectedType(); + ExpectedValue expectedValue = PluginLoader + .getPluginLoader(ExpectedValue.class) + .getNewPlugin(expectedType); + + // 只有在确保必要参数存在的情况下才生成 uniqueCode + if (metricInputParameter.containsKey(METRIC_NAME)) { + metricInputParameter.put(UNIQUE_CODE, StringUtils.wrapperSingleQuotes(MetricParserUtils.generateUniqueCode(metricInputParameter))); + } + + // Get the actual value storage parameter + String actualValueSinkSql = FlinkSinkSqlBuilder.getActualValueSql() + .replace("${actual_value}", "${actual_value_" + metricUniqueKey + "}"); + SinkConfig actualValueSinkConfig = getValidateResultDataSinkConfig( + expectedValue, actualValueSinkSql, "dv_actual_values", metricInputParameter); + + if (actualValueSinkConfig != null) { + actualValueSinkConfig.setType(SinkType.ACTUAL_VALUE.getDescription()); + sinkConfigs.add(actualValueSinkConfig); + } + + String taskSinkSql = FlinkSinkSqlBuilder.getDefaultSinkSql() + .replace("${actual_value}", "${actual_value_" + metricUniqueKey + "}") + .replace("${expected_value}", "${expected_value_" + metricUniqueKey + "}"); + + // Get the task data storage parameter + SinkConfig taskResultSinkConfig = getValidateResultDataSinkConfig( + expectedValue, taskSinkSql, "dv_job_execution_result", metricInputParameter); + if (taskResultSinkConfig != null) { + taskResultSinkConfig.setType(SinkType.VALIDATE_RESULT.getDescription()); + // 设置默认状态为未知(NONE) + taskResultSinkConfig.getConfig().put("default_state", "0"); + // 添加其他必要参数 + taskResultSinkConfig.getConfig().put("metric_type", "single_table"); + taskResultSinkConfig.getConfig().put("metric_name", metricInputParameter.get(METRIC_NAME)); + taskResultSinkConfig.getConfig().put("metric_dimension", 
metricInputParameter.get(METRIC_DIMENSION)); + taskResultSinkConfig.getConfig().put("database_name", metricInputParameter.get(DATABASE)); + taskResultSinkConfig.getConfig().put("table_name", metricInputParameter.get(TABLE)); + taskResultSinkConfig.getConfig().put("column_name", metricInputParameter.get(COLUMN)); + taskResultSinkConfig.getConfig().put("expected_type", metricInputParameter.get(EXPECTED_TYPE)); + taskResultSinkConfig.getConfig().put("result_formula", metricInputParameter.get(RESULT_FORMULA)); + sinkConfigs.add(taskResultSinkConfig); + } + + // Get the error data storage parameter if needed + if (StringUtils.isNotEmpty(jobExecutionInfo.getErrorDataStorageType()) + && StringUtils.isNotEmpty(jobExecutionInfo.getErrorDataStorageParameter())) { + SinkConfig errorDataSinkConfig = getErrorSinkConfig(metricInputParameter); + if (errorDataSinkConfig != null) { + errorDataSinkConfig.setType(SinkType.ERROR_DATA.getDescription()); + sinkConfigs.add(errorDataSinkConfig); + } + } + } + } + + configuration.setSinkParameters(sinkConfigs); + } + + @Override + public void buildTransformConfigs() { + // No transform configs needed for single table configuration + } + + @Override + public void buildSourceConfigs() throws DataVinesException { + List<SourceConfig> sourceConfigs = getSourceConfigs(); + configuration.setSourceParameters(sourceConfigs); + } + + @Override + public void buildName() { + // Use default name from base implementation + } +} diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/config/FlinkSinkSqlBuilder.java b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/config/FlinkSinkSqlBuilder.java new file mode 100644 index 000000000..568dad4b6 --- /dev/null +++ 
b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/config/FlinkSinkSqlBuilder.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.datavines.engine.flink.config; + +public class FlinkSinkSqlBuilder { + + private FlinkSinkSqlBuilder() { + throw new IllegalStateException("Utility class"); + } + + public static String getActualValueSql() { + return "select\n" + + " '${job_execution_id}' as job_execution_id,\n" + + " '${metric_unique_key}' as metric_unique_key,\n" + + " '${unique_code}' as unique_code,\n" + + " ${actual_value} as actual_value,\n" + + " cast(null as string) as expected_value,\n" + + " cast(null as string) as operator,\n" + + " cast(null as string) as threshold,\n" + + " cast(null as string) as check_type,\n" + + " CURRENT_TIMESTAMP as create_time,\n" + + " CURRENT_TIMESTAMP as update_time\n" + + "from ${table_name}"; + } + + public static String getDefaultSinkSql() { + return "select\n" + + " '${job_execution_id}' as job_execution_id,\n" + + " '${metric_unique_key}' as metric_unique_key,\n" + + " '${unique_code}' as unique_code,\n" + + " CASE WHEN ${actual_value} IS NULL THEN NULL ELSE 
${actual_value} END as actual_value,\n" + + " CASE WHEN ${expected_value} IS NULL THEN NULL ELSE ${expected_value} END as expected_value,\n" + + " '${metric_type}' as metric_type,\n" + + " '${metric_name}' as metric_name,\n" + + " '${metric_dimension}' as metric_dimension,\n" + + " '${database_name}' as database_name,\n" + + " '${table_name}' as table_name,\n" + + " '${column_name}' as column_name,\n" + + " '${operator}' as operator,\n" + + " '${threshold}' as threshold,\n" + + " '${expected_type}' as expected_type,\n" + + " '${result_formula}' as result_formula,\n" + + " CASE WHEN ${actual_value} IS NULL THEN '${default_state}' ELSE NULL END as state,\n" + + " CURRENT_TIMESTAMP as create_time,\n" + + " CURRENT_TIMESTAMP as update_time\n" + + "from ${table_name} full join ${expected_table}"; + } +} diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/core/FlinkDataVinesBootstrap.java b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/core/FlinkDataVinesBootstrap.java new file mode 100644 index 000000000..5a0764385 --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/core/FlinkDataVinesBootstrap.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.datavines.engine.flink.core;
+
+import java.util.Base64;
+
+import io.datavines.engine.core.BaseDataVinesBootstrap;
+
+public class FlinkDataVinesBootstrap extends BaseDataVinesBootstrap {
+
+    public static void main(String[] args) {
+        FlinkDataVinesBootstrap bootstrap = new FlinkDataVinesBootstrap();
+        if (args.length == 1) {
+            String arg = args[0];
+            args[0] = new String(Base64.getDecoder().decode(arg));
+            bootstrap.execute(args);
+        }
+    }
+}
diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/core/sink/FlinkJdbcSink.java b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/core/sink/FlinkJdbcSink.java
new file mode 100644
index 000000000..391e886aa
--- /dev/null
+++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/core/sink/FlinkJdbcSink.java
@@ -0,0 +1,160 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.datavines.engine.flink.core.sink; + +import io.datavines.common.config.CheckResult; +import io.datavines.common.config.Config; +import io.datavines.common.utils.StringUtils; +import io.datavines.engine.api.env.RuntimeEnvironment; +import io.datavines.engine.flink.api.FlinkRuntimeEnvironment; +import io.datavines.engine.flink.api.stream.FlinkStreamSink; +import org.apache.flink.connector.jdbc.JdbcExecutionOptions; +import org.apache.flink.connector.jdbc.JdbcSink; +import org.apache.flink.connector.jdbc.JdbcConnectionOptions; +import org.apache.flink.streaming.api.datastream.DataStream; +import org.apache.flink.types.Row; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSetMetaData; +import java.util.*; +import java.util.stream.Collectors; +import static io.datavines.common.ConfigConstants.*; + +import io.datavines.engine.common.utils.ParserUtils; + +public class FlinkJdbcSink implements FlinkStreamSink { + + private static final long serialVersionUID = 1L; + + private Config config = new Config(); + private transient String[] fieldNames; + private transient int batchSize = 1000; + private transient long batchIntervalMs = 200; + private transient int maxRetries = 3; + + @Override + public void setConfig(Config config) { + if(config != null) { + this.config = config; + this.batchSize = config.getInt("jdbc.batch.size", 1000); + this.batchIntervalMs = config.getLong("jdbc.batch.interval.ms", 200L); + this.maxRetries = config.getInt("jdbc.max.retries", 3); + } + } + + @Override + public Config getConfig() { 
+ return config; + } + + @Override + public CheckResult checkConfig() { + List<String> requiredOptions = Arrays.asList(URL, TABLE, USER, PASSWORD); + + List<String> nonExistsOptions = new ArrayList<>(); + requiredOptions.forEach(x->{ + if(!config.has(x)){ + nonExistsOptions.add(x); + } + }); + + if (!nonExistsOptions.isEmpty()) { + return new CheckResult( + false, + "please specify " + nonExistsOptions.stream().map(option -> + "[" + option + "]").collect(Collectors.joining(",")) + " as non-empty string"); + } else { + return new CheckResult(true, ""); + } + } + + @Override + public void prepare(RuntimeEnvironment env) throws Exception { + // Load JDBC driver class + String driver = config.getString(DRIVER, "com.mysql.jdbc.Driver"); + Class.forName(driver); + + // Get table metadata to initialize field names + String url = config.getString(URL); + String user = config.getString(USER); + String password = config.getString(PASSWORD); + String table = config.getString(TABLE); + + if (!StringUtils.isEmptyOrNullStr(password)) { + password = ParserUtils.decode(password); + } + + try (Connection conn = DriverManager.getConnection(url, user, password)) { + try (java.sql.PreparedStatement ps = conn.prepareStatement("SELECT * FROM " + table + " WHERE 1=0")) { + ResultSetMetaData metaData = ps.getMetaData(); + int columnCount = metaData.getColumnCount(); + + fieldNames = new String[columnCount]; + for (int i = 0; i < columnCount; i++) { + fieldNames[i] = metaData.getColumnName(i + 1); + } + } + } + } + + @Override + public void output(DataStream<Row> dataStream, FlinkRuntimeEnvironment environment) { + String url = config.getString(URL); + String table = config.getString(TABLE); + String user = config.getString(USER); + String password = config.getString(PASSWORD); + String driver = config.getString(DRIVER, "com.mysql.jdbc.Driver"); + + // Decode password if needed + if (!StringUtils.isEmptyOrNullStr(password)) { + password = ParserUtils.decode(password); + } + + // Build JDBC 
execution options + JdbcExecutionOptions executionOptions = JdbcExecutionOptions.builder() + .withBatchSize(batchSize) + .withBatchIntervalMs(batchIntervalMs) + .withMaxRetries(maxRetries) + .build(); + + // Create insert SQL statement + String insertSql = createInsertSql(table, fieldNames); + + // Build JDBC sink + dataStream.addSink(JdbcSink.sink( + insertSql, + (statement, row) -> { + for (int i = 0; i < fieldNames.length; i++) { + statement.setObject(i + 1, row.getField(i)); + } + }, + executionOptions, + new JdbcConnectionOptions.JdbcConnectionOptionsBuilder() + .withUrl(url) + .withDriverName(driver) + .withUsername(user) + .withPassword(password) + .build() + )); + } + + private String createInsertSql(String table, String[] fieldNames) { + String columns = String.join(", ", fieldNames); + String placeholders = String.join(", ", Collections.nCopies(fieldNames.length, "?")); + return String.format("INSERT INTO %s (%s) VALUES (%s)", table, columns, placeholders); + } +} diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/source/FlinkJdbcSource.java b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/source/FlinkJdbcSource.java new file mode 100644 index 000000000..9aef8533f --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/source/FlinkJdbcSource.java @@ -0,0 +1,160 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.datavines.engine.flink.core.source; + +import io.datavines.common.config.CheckResult; +import io.datavines.common.config.Config; +import io.datavines.common.utils.CryptionUtils; +import io.datavines.common.utils.StringUtils; +import io.datavines.engine.api.env.RuntimeEnvironment; +import io.datavines.engine.flink.api.FlinkRuntimeEnvironment; +import io.datavines.engine.flink.api.stream.FlinkStreamSource; +import org.apache.flink.api.common.typeinfo.TypeInformation; +import org.apache.flink.api.java.typeutils.RowTypeInfo; +import org.apache.flink.connector.jdbc.JdbcInputFormat; +import org.apache.flink.streaming.api.datastream.DataStream; +import org.apache.flink.types.Row; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSetMetaData; +import java.util.*; +import java.util.stream.Collectors; + +import static io.datavines.common.ConfigConstants.*; + +public class FlinkJdbcSource implements FlinkStreamSource { + + private static final long serialVersionUID = 1L; + + private Config config = new Config(); + private transient String[] fieldNames; + private transient Class<?>[] fieldTypes; + + @Override + public void setConfig(Config config) { + if(config != null) { + this.config = config; + } + } + + @Override + public Config getConfig() { + return config; + } + + @Override + public CheckResult checkConfig() { + List<String> requiredOptions = Arrays.asList(URL, TABLE, USER); + + List<String> nonExistsOptions = new ArrayList<>(); + requiredOptions.forEach(x->{ + if(!config.has(x)){ + 
nonExistsOptions.add(x); + } + }); + + if (!nonExistsOptions.isEmpty()) { + return new CheckResult( + false, + "please specify " + nonExistsOptions.stream().map(option -> + "[" + option + "]").collect(Collectors.joining(",")) + " as non-empty string"); + } else { + return new CheckResult(true, ""); + } + } + + @Override + public void prepare(RuntimeEnvironment env) throws Exception { + String driver = config.getString(DRIVER, "com.mysql.jdbc.Driver"); + Class.forName(driver); + + String url = config.getString(URL); + String user = config.getString(USER); + String password = config.getString(PASSWORD); + String table = config.getString(TABLE); + String query = config.getString(SQL, "SELECT * FROM " + table); + + if (!StringUtils.isEmptyOrNullStr(password)) { + try { + password = CryptionUtils.decryptByAES(password, "datavines"); + } catch (Exception e) { + throw new RuntimeException("Failed to decrypt password", e); + } + } + + try (Connection conn = DriverManager.getConnection(url, user, password)) { + try (java.sql.PreparedStatement ps = conn.prepareStatement(query)) { + ResultSetMetaData metaData = ps.getMetaData(); + int columnCount = metaData.getColumnCount(); + + fieldNames = new String[columnCount]; + fieldTypes = new Class<?>[columnCount]; + + for (int i = 0; i < columnCount; i++) { + fieldNames[i] = metaData.getColumnName(i + 1); + fieldTypes[i] = Class.forName(metaData.getColumnClassName(i + 1)); + } + } + } + } + + @Override + public DataStream<Row> getData(FlinkRuntimeEnvironment environment) { + String url = config.getString(URL); + String user = config.getString(USER); + String password = config.getString(PASSWORD); + String driver = config.getString(DRIVER, "com.mysql.jdbc.Driver"); + String table = config.getString(TABLE); + String query = config.getString(SQL, "SELECT * FROM " + table); + + if (!StringUtils.isEmptyOrNullStr(password)) { + try { + password = CryptionUtils.decryptByAES(password, "datavines"); + } catch (Exception e) { + throw new 
RuntimeException("Failed to decrypt password", e); + } + } + + TypeInformation<?>[] typeInfos = new TypeInformation[fieldTypes.length]; + for (int i = 0; i < fieldTypes.length; i++) { + typeInfos[i] = TypeInformation.of(fieldTypes[i]); + } + RowTypeInfo rowTypeInfo = new RowTypeInfo(typeInfos, fieldNames); + + JdbcInputFormat jdbcInputFormat = JdbcInputFormat.buildJdbcInputFormat() + .setDrivername(driver) + .setDBUrl(url) + .setUsername(user) + .setPassword(password) + .setQuery(query) + .setRowTypeInfo(rowTypeInfo) + .finish(); + + return environment.getEnv().createInput(jdbcInputFormat); + } + + @Override + public String[] getFieldNames() { + return fieldNames; + } + + @Override + public Class<?>[] getFieldTypes() { + return fieldTypes; + } +} diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/transform/FlinkSqlTransform.java b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/transform/FlinkSqlTransform.java new file mode 100644 index 000000000..831aed1b0 --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/java/io/datavines/engine/flink/transform/FlinkSqlTransform.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.datavines.engine.flink.core.transform; + +import io.datavines.common.config.CheckResult; +import io.datavines.common.config.Config; +import io.datavines.engine.api.env.RuntimeEnvironment; +import io.datavines.engine.flink.api.FlinkRuntimeEnvironment; +import io.datavines.engine.flink.api.stream.FlinkStreamTransform; +import io.datavines.engine.api.plugin.Plugin; +import io.datavines.common.utils.StringUtils; + +import org.apache.flink.streaming.api.datastream.DataStream; +import org.apache.flink.types.Row; +import org.apache.flink.table.api.Table; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; + +import static io.datavines.common.ConfigConstants.SQL; + +public class FlinkSqlTransform implements FlinkStreamTransform, Plugin { + + private String sql; + private String[] outputFieldNames; + private Class<?>[] outputFieldTypes; + private Config config; + + @Override + public void setConfig(Config config) { + this.config = config; + if (config != null) { + this.sql = config.getString(SQL); + } + } + + @Override + public Config getConfig() { + return this.config; + } + + @Override + public CheckResult checkConfig() { + if (StringUtils.isEmptyOrNullStr(sql)) { + return new CheckResult(false, "please specify [sql] as non-empty string"); + } + return new CheckResult(true, ""); + } + + @Override + public void prepare(RuntimeEnvironment env) throws Exception { + // No special preparation needed for SQL transform + } + + @Override + public DataStream<Row> process(DataStream<Row> dataStream, FlinkRuntimeEnvironment 
environment) { + StreamTableEnvironment tableEnv = environment.getTableEnv(); + + // Register input table + tableEnv.createTemporaryView("input_table", dataStream); + + // Execute SQL transformation + Table resultTable = tableEnv.sqlQuery(sql); + + // Convert back to DataStream + return tableEnv.toDataStream(resultTable, Row.class); + } + + @Override + public String[] getOutputFieldNames() { + return outputFieldNames; + } + + @Override + public Class<?>[] getOutputFieldTypes() { + return outputFieldTypes; + } + + public void setSql(String sql) { + this.sql = sql; + } + + public void setOutputFieldNames(String[] outputFieldNames) { + this.outputFieldNames = outputFieldNames; + } + + public void setOutputFieldTypes(Class<?>[] outputFieldTypes) { + this.outputFieldTypes = outputFieldTypes; + } +} diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/resources/META-INF/plugins/io.datavines.engine.api.engine.EngineExecutor b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/resources/META-INF/plugins/io.datavines.engine.api.engine.EngineExecutor new file mode 100644 index 000000000..68045cb12 --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/resources/META-INF/plugins/io.datavines.engine.api.engine.EngineExecutor @@ -0,0 +1 @@ +flink=io.datavines.engine.flink.FlinkEngineExecutor diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/resources/META-INF/plugins/io.datavines.engine.config.JobConfigurationBuilder b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/resources/META-INF/plugins/io.datavines.engine.config.JobConfigurationBuilder new file mode 100644 index 000000000..394a4be0f --- /dev/null +++ 
b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/resources/META-INF/plugins/io.datavines.engine.config.JobConfigurationBuilder @@ -0,0 +1 @@ +flink_single_table=io.datavines.engine.flink.config.FlinkSingleTableConfigurationBuilder diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/resources/META-INF/services/io.datavines.engine.config.JobConfigurationBuilder b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/resources/META-INF/services/io.datavines.engine.config.JobConfigurationBuilder new file mode 100644 index 000000000..394a4be0f --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-core/src/main/resources/META-INF/services/io.datavines.engine.config.JobConfigurationBuilder @@ -0,0 +1 @@ +flink_single_table=io.datavines.engine.flink.config.FlinkSingleTableConfigurationBuilder diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/pom.xml b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/pom.xml new file mode 100644 index 000000000..260054fe5 --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/pom.xml @@ -0,0 +1,59 @@ +<?xml version="1.0" encoding="UTF-8"?> +<!-- + + Licensed to the Apache Software Foundation (ASF) under one or more + contributor license agreements. See the NOTICE file distributed with + this work for additional information regarding copyright ownership. + The ASF licenses this file to You under the Apache License, Version 2.0 + (the "License"); you may not use this file except in compliance with + the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +--> +<project xmlns="http://maven.apache.org/POM/4.0.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <parent> + <artifactId>datavines-engine-flink</artifactId> + <groupId>io.datavines</groupId> + <version>1.0.0-SNAPSHOT</version> + </parent> + <modelVersion>4.0.0</modelVersion> + + <artifactId>datavines-engine-flink-executor</artifactId> + + <dependencies> + <dependency> + <groupId>io.datavines</groupId> + <artifactId>datavines-engine-api</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>io.datavines</groupId> + <artifactId>datavines-engine-executor</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>io.datavines</groupId> + <artifactId>datavines-engine-core</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>io.datavines</groupId> + <artifactId>datavines-engine-flink-api</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>io.datavines</groupId> + <artifactId>datavines-common</artifactId> + <version>${project.version}</version> + </dependency> + </dependencies> +</project> diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/java/io/datavines/engine/flink/executor/FlinkCommandProcess.java 
b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/java/io/datavines/engine/flink/executor/FlinkCommandProcess.java new file mode 100644 index 000000000..269c0dd97 --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/java/io/datavines/engine/flink/executor/FlinkCommandProcess.java @@ -0,0 +1,148 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.datavines.engine.flink.executor; + +import io.datavines.common.config.Configurations; +import io.datavines.common.entity.JobExecutionRequest; +import io.datavines.common.utils.ProcessUtils; +import io.datavines.engine.executor.core.executor.BaseCommandProcess; +import org.slf4j.Logger; +import java.io.File; +import java.io.IOException; +import java.lang.reflect.Field; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.function.Consumer; + +public class FlinkCommandProcess extends BaseCommandProcess { + + private static final String SH = "sh"; + + public FlinkCommandProcess(Consumer<List<String>> logHandler, + Logger logger, + JobExecutionRequest jobExecutionRequest, + Configurations configurations) { + super(logHandler, logger, jobExecutionRequest, configurations); + } + + @Override + protected String buildCommandFilePath() { + return String.format("%s/%s.command", jobExecutionRequest.getExecuteFilePath(), jobExecutionRequest.getJobExecutionId()); + } + + @Override + protected void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException { + logger.info("tenant {},job dir:{}", jobExecutionRequest.getTenantCode(), jobExecutionRequest.getExecuteFilePath()); + + Path commandFilePath = Paths.get(commandFile); + // 确保父目录存在 + Files.createDirectories(commandFilePath.getParent()); + + if(Files.exists(commandFilePath)){ + Files.delete(commandFilePath); + } + + logger.info("create command file:{}",commandFile); + + StringBuilder sb = new StringBuilder(); + sb.append("#!/bin/sh\n"); + sb.append("BASEDIR=$(cd `dirname $0`; pwd)\n"); + sb.append("cd $BASEDIR\n"); + sb.append("\n"); + sb.append(execCommand); + + // 设置文件权限为可执行 + Files.write(commandFilePath, sb.toString().getBytes()); + commandFilePath.toFile().setExecutable(true, false); + } + + 
@Override + protected String commandInterpreter() { + return SH; + } + + @Override + protected List<String> commandOptions() { + List<String> options = new LinkedList<>(); + options.add("-c"); + return options; + } + + public void cleanupTempFiles() { + try { + String commandFile = buildCommandFilePath(); + Path commandPath = Paths.get(commandFile); + if (Files.exists(commandPath)) { + Files.delete(commandPath); + logger.info("Cleaned up command file: {}", commandFile); + } + } catch (IOException e) { + logger.warn("Failed to cleanup command file", e); + } + } + + private void buildProcess(String commandFile) throws IOException { + // Create process builder + ProcessBuilder processBuilder = buildProcessBuilder(commandFile); + // merge error information to standard output stream + processBuilder.redirectErrorStream(true); + + // Print command for debugging + try { + String cmdStr = ProcessUtils.buildCommandStr(processBuilder.command()); + logger.info("job run command:\n{}", cmdStr); + } catch (IOException e) { + logger.error(e.getMessage(), e); + } + + // Start the process + try { + Process tempProcess = processBuilder.start(); + // Use reflection to set the process field in parent class + Field processField = BaseCommandProcess.class.getDeclaredField("process"); + processField.setAccessible(true); + processField.set(this, tempProcess); + } catch (Exception e) { + logger.error("Failed to start or set process: " + e.getMessage(), e); + throw new IOException("Failed to start process", e); + } + } + + private ProcessBuilder buildProcessBuilder(String commandFile) { + List<String> commandList = new ArrayList<>(); + + // 直接执行命令,不使用sudo + commandList.add(commandInterpreter()); + commandList.addAll(commandOptions()); + commandList.add(commandFile); + + ProcessBuilder processBuilder = new ProcessBuilder(commandList); + processBuilder.directory(new File(jobExecutionRequest.getExecuteFilePath())); + + // 添加环境变量 + Map<String, String> env = processBuilder.environment(); + 
env.put("FLINK_HOME", System.getenv("FLINK_HOME")); + + return processBuilder; + } +} diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/java/io/datavines/engine/flink/executor/FlinkEngineExecutor.java b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/java/io/datavines/engine/flink/executor/FlinkEngineExecutor.java new file mode 100644 index 000000000..8d9b1e1bd --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/java/io/datavines/engine/flink/executor/FlinkEngineExecutor.java @@ -0,0 +1,308 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.datavines.engine.flink.executor; + +import org.slf4j.Logger; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.datavines.common.config.Configurations; +import io.datavines.common.entity.JobExecutionRequest; +import io.datavines.common.entity.ProcessResult; +import io.datavines.common.enums.ExecutionStatus; +import io.datavines.common.utils.LoggerUtils; +import io.datavines.engine.executor.core.base.AbstractYarnEngineExecutor; +import io.datavines.engine.executor.core.executor.BaseCommandProcess; +import io.datavines.engine.flink.executor.utils.FlinkParameters; +import io.datavines.common.utils.YarnUtils; +import org.apache.commons.lang3.StringUtils; + +import java.io.File; +import java.util.List; + +public class FlinkEngineExecutor extends AbstractYarnEngineExecutor { + + private static final String FLINK_COMMAND = "flink"; + private Configurations configurations; + private JobExecutionRequest jobExecutionRequest; + private Logger logger; + private ProcessResult processResult; + private BaseCommandProcess shellCommandProcess; + private boolean cancel; + + @Override + public void init(JobExecutionRequest jobExecutionRequest, Logger logger, Configurations configurations) throws Exception { + String threadLoggerInfoName = String.format(LoggerUtils.JOB_LOG_INFO_FORMAT, jobExecutionRequest.getJobExecutionUniqueId()); + Thread.currentThread().setName(threadLoggerInfoName); + + this.jobExecutionRequest = jobExecutionRequest; + this.logger = logger; + this.configurations = configurations; + this.processResult = new ProcessResult(); + this.shellCommandProcess = new FlinkCommandProcess( + this::logHandle, + logger, + jobExecutionRequest, + configurations + ); + } + + @Override + public void execute() throws Exception { + try { + String command = buildCommand(); + logger.info("flink task command: {}", command); + ProcessResult result = shellCommandProcess.run(command); + + // Check exit code and 
set execution result + if (result.getExitStatusCode() == ExecutionStatus.SUCCESS.getCode()) { + processResult.setExitStatusCode(ExecutionStatus.SUCCESS.getCode()); + processResult.setProcessId(Integer.valueOf(String.valueOf(jobExecutionRequest.getJobExecutionId()))); + logger.info("Flink job executed successfully"); + } else { + processResult.setExitStatusCode(ExecutionStatus.FAILURE.getCode()); + processResult.setProcessId(Integer.valueOf(String.valueOf(jobExecutionRequest.getJobExecutionId()))); + String errorMsg = String.format("Flink job execution failed with exit code: %d", result.getExitStatusCode()); + logger.error(errorMsg); + throw new RuntimeException(errorMsg); + } + } catch (Exception e) { + logger.error("flink task error", e); + processResult.setExitStatusCode(ExecutionStatus.FAILURE.getCode()); + processResult.setProcessId(Integer.valueOf(String.valueOf(jobExecutionRequest.getJobExecutionId()))); + throw e; + } + } + + @Override + public void after() throws Exception { + try { + if (shellCommandProcess != null) { + ((FlinkCommandProcess)shellCommandProcess).cleanupTempFiles(); + } + } catch (Exception e) { + logger.error("Error in after execution", e); + // 不抛出异常,避免影响主流程 + } + } + + @Override + public ProcessResult getProcessResult() { + return this.processResult; + } + + @Override + public JobExecutionRequest getTaskRequest() { + return this.jobExecutionRequest; + } + + @Override + protected String buildCommand() { + FlinkParameters parameters = new FlinkParameters(); + + // 从applicationParameter中获取部署模式 + String deployMode = null; + JsonNode envNode = null; + try { + String applicationParameter = jobExecutionRequest.getApplicationParameter(); + if (applicationParameter != null) { + ObjectMapper mapper = new ObjectMapper(); + JsonNode jsonNode = mapper.readTree(applicationParameter); + envNode = jsonNode.get("env"); + if (envNode != null && envNode.has("deployMode")) { + deployMode = envNode.get("deployMode").asText(); + } + } + } catch (Exception e) 
{ + logger.error("Failed to parse applicationParameter", e); + } + + // 如果applicationParameter中没有deployMode,则从configurations中获取 + if (deployMode == null) { + deployMode = configurations.getString("deployMode", "local"); // 默认使用local模式 + } + + logger.info("Using deploy mode: {}", deployMode); + parameters.setDeployMode(deployMode); + + // 获取 FLINK_HOME + String flinkHome = System.getenv("FLINK_HOME"); + if (flinkHome == null || flinkHome.trim().isEmpty()) { + // 从配置中获取 + flinkHome = configurations.getString("flink.home"); + if (flinkHome == null || flinkHome.trim().isEmpty()) { + // 使用默认路径 + flinkHome = "/opt/flink"; + logger.info("FLINK_HOME not set, using default path: {}", flinkHome); + } + } + + // 检查Flink目录是否存在 + File flinkDir = new File(flinkHome); + if (!flinkDir.exists() || !flinkDir.isDirectory()) { + logger.warn("Flink directory not found at: {}. Please make sure Flink is properly installed.", flinkHome); + } + + // 构建完整的 Flink 命令路径 + StringBuilder command = new StringBuilder(); + command.append(flinkHome); + command.append("/bin/").append(FLINK_COMMAND); + + // 根据部署模式添加不同的参数 + switch (deployMode.toLowerCase()) { + case "yarn-session": + command.append(" run"); + command.append(" -t yarn-session"); // 指定运行模式为 yarn-session + command.append(" -Dyarn.application.name=").append(jobExecutionRequest.getJobExecutionName()); + // 添加yarn-session特定的内存配置 + if (envNode != null) { + String jobManagerMemory = envNode.get("jobmanager.memory.process.size").asText("1024m"); + String taskManagerMemory = envNode.get("taskmanager.memory.process.size").asText("1024m"); + command.append(" -Djobmanager.memory.process.size=").append(jobManagerMemory); + command.append(" -Dtaskmanager.memory.process.size=").append(taskManagerMemory); + } + break; + case "yarn-per-job": + command.append(" run"); + command.append(" -t yarn-per-job"); // 指定运行模式为 yarn-per-job + command.append(" -Dyarn.application.name=").append(jobExecutionRequest.getJobExecutionName()); + // 添加yarn-per-job特定的内存配置 + 
if (envNode != null) { + String jobManagerMemory = envNode.get("jobmanager.memory.process.size").asText("1024m"); + String taskManagerMemory = envNode.get("taskmanager.memory.process.size").asText("1024m"); + command.append(" -Djobmanager.memory.process.size=").append(jobManagerMemory); + command.append(" -Dtaskmanager.memory.process.size=").append(taskManagerMemory); + } + break; + case "yarn-application": + command.append(" run-application"); + command.append(" -t yarn-application"); // 指定运行模式为 yarn-application + command.append(" -Dyarn.application.name=").append(jobExecutionRequest.getJobExecutionName()); + // 添加yarn-application特定的内存配置 + if (envNode != null) { + String jobManagerMemory = envNode.get("jobmanager.memory.process.size").asText("1024m"); + String taskManagerMemory = envNode.get("taskmanager.memory.process.size").asText("1024m"); + command.append(" -Djobmanager.memory.process.size=").append(jobManagerMemory); + command.append(" -Dtaskmanager.memory.process.size=").append(taskManagerMemory); + } + break; + case "local": + default: + command.append(" run"); + // 本地模式不需要添加额外的部署模式参数 + break; + } + + // 添加通用参数 + command.append(" -p 1"); // 设置并行度为1 + command.append(" -c ").append(configurations.getString("mainClass", "io.datavines.engine.flink.core.FlinkDataVinesBootstrap")); + + // 添加主jar包 + String mainJar = configurations.getString("mainJar", flinkHome + "/lib/datavines-flink-core.jar"); + command.append(" ").append(mainJar); + + return command.toString(); + } + + public String getApplicationId() { + return processResult != null ? processResult.getApplicationId() : null; + } + + public String getApplicationUrl() { + return processResult != null ? 
YarnUtils.getApplicationUrl(processResult.getApplicationId()) : null; + } + + @Override + public void cancel() throws Exception { + cancel = true; + if (shellCommandProcess != null) { + shellCommandProcess.cancel(); + } + killYarnApplication(); + } + + private void killYarnApplication() { + try { + String applicationId = YarnUtils.getYarnAppId(jobExecutionRequest.getTenantCode(), + jobExecutionRequest.getJobExecutionUniqueId()); + + if (StringUtils.isNotEmpty(applicationId)) { + // sudo -u user command to run command + String cmd = String.format("sudo -u %s yarn application -kill %s", + jobExecutionRequest.getTenantCode(), + applicationId); + + logger.info("yarn application -kill {}", applicationId); + Runtime.getRuntime().exec(cmd); + } + } catch (Exception e) { + logger.error("kill yarn application failed", e); + } + } + + @Override + public void logHandle(List<String> logs) { + // 处理日志输出 + if (logs != null && !logs.isEmpty()) { + for (String log : logs) { + logger.info(log); + // 可以在这里添加对日志的解析,比如提取applicationId等信息 + if (log.contains("Job has been submitted with JobID")) { + String jobId = extractJobId(log); + if (jobId != null) { + processResult.setProcessId(Integer.valueOf(jobId)); + } + } + // 尝试从日志中提取Yarn Application ID + if (log.contains("Submitted application")) { + String appId = extractYarnAppId(log); + if (appId != null) { + processResult.setApplicationId(appId); + } + } + } + } + } + + private String extractJobId(String log) { + // 从日志中提取JobID的简单实现 + int index = log.indexOf("JobID"); + if (index != -1) { + return log.substring(index).trim(); + } + return null; + } + + private String extractYarnAppId(String log) { + // 从日志中提取Yarn Application ID的简单实现 + int index = log.indexOf("application_"); + if (index != -1) { + String appId = log.substring(index); + // 提取到第一个空格为止 + int spaceIndex = appId.indexOf(" "); + if (spaceIndex != -1) { + appId = appId.substring(0, spaceIndex); + } + return appId; + } + return null; + } + + @Override + public boolean isCancel() 
{ + return cancel; + } +} diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/java/io/datavines/engine/flink/executor/parameter/FlinkArgsUtils.java b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/java/io/datavines/engine/flink/executor/parameter/FlinkArgsUtils.java new file mode 100644 index 000000000..7afa91a44 --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/java/io/datavines/engine/flink/executor/parameter/FlinkArgsUtils.java @@ -0,0 +1,79 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.datavines.engine.flink.executor.parameter;

import java.util.ArrayList;
import java.util.List;

/**
 * Builds the argument list for the "flink run" command line from a
 * {@link FlinkParameters} instance.
 *
 * NOTE(review): a second FlinkArgsUtils with slightly different behavior exists
 * in io.datavines.engine.flink.executor.utils — the two should be consolidated.
 */
public class FlinkArgsUtils {

    private static final String FLINK_CLUSTER = "cluster";
    private static final String FLINK_LOCAL = "local";
    private static final String FLINK_YARN = "yarn";

    private FlinkArgsUtils() {
        throw new IllegalStateException("Utility class");
    }

    /**
     * @param param job submission parameters; mainJar should point at the job jar
     * @return CLI arguments in the order expected by the Flink client:
     *         run [-m target] [-p n] [-Dkey=value...] [-c mainClass] [jar] [programArgs]
     */
    public static List<String> buildArgs(FlinkParameters param) {
        List<String> args = new ArrayList<>();

        args.add("run");

        // Deployment target: "cluster" -> yarn-cluster, "yarn" -> yarn-session,
        // anything else (e.g. local) adds no -m flag.
        if (FLINK_CLUSTER.equals(param.getDeployMode())) {
            args.add("-m");
            args.add("yarn-cluster");
        } else if (FLINK_YARN.equals(param.getDeployMode())) {
            args.add("-m");
            args.add("yarn-session");
        }

        if (param.getParallelism() > 0) {
            args.add("-p");
            args.add(String.valueOf(param.getParallelism()));
        }

        if (param.getJobName() != null && !param.getJobName().isEmpty()) {
            args.add("-Dyarn.application.name=" + param.getJobName());
        }

        if (param.getYarnQueue() != null && !param.getYarnQueue().isEmpty()) {
            args.add("-Dyarn.application.queue=" + param.getYarnQueue());
        }

        // -c must precede the jar path on the flink CLI.
        if (param.getMainClass() != null && !param.getMainClass().isEmpty()) {
            args.add("-c");
            args.add(param.getMainClass());
        }

        // Guard against a missing jar: the original appended the value
        // unconditionally, which surfaces as the literal argument "null".
        if (param.getMainJar() != null && !param.getMainJar().isEmpty()) {
            args.add(param.getMainJar());
        }

        if (param.getMainArgs() != null && !param.getMainArgs().isEmpty()) {
            args.add(param.getMainArgs());
        }

        return args;
    }
}
file mode 100644 index 000000000..ea84c4dd2 --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/java/io/datavines/engine/flink/executor/parameter/FlinkParameters.java @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.datavines.engine.flink.executor.parameter; + +public class FlinkParameters { + + private String mainJar; + private String mainClass; + private String deployMode; + private String mainArgs; + private String yarnQueue; + private String jobName; + private int parallelism = 1; + + public String getMainJar() { + return mainJar; + } + + public void setMainJar(String mainJar) { + this.mainJar = mainJar; + } + + public String getMainClass() { + return mainClass; + } + + public void setMainClass(String mainClass) { + this.mainClass = mainClass; + } + + public String getDeployMode() { + return deployMode; + } + + public void setDeployMode(String deployMode) { + this.deployMode = deployMode; + } + + public String getMainArgs() { + return mainArgs; + } + + public void setMainArgs(String mainArgs) { + this.mainArgs = mainArgs; + } + + public String getYarnQueue() { + return yarnQueue; + } + + public void setYarnQueue(String yarnQueue) { + this.yarnQueue = yarnQueue; + } + + public String getJobName() { + return jobName; + } + + public void setJobName(String jobName) { + this.jobName = jobName; + } + + public int getParallelism() { + return parallelism; + } + + public void setParallelism(int parallelism) { + this.parallelism = parallelism; + } +} diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/java/io/datavines/engine/flink/executor/utils/FlinkArgsUtils.java b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/java/io/datavines/engine/flink/executor/utils/FlinkArgsUtils.java new file mode 100644 index 000000000..b6c4318f9 --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/java/io/datavines/engine/flink/executor/utils/FlinkArgsUtils.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.datavines.engine.flink.executor.utils;

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang3.StringUtils;

/**
 * Builds the "flink run" argument list from {@link FlinkParameters}.
 *
 * NOTE(review): duplicates io.datavines.engine.flink.executor.parameter.FlinkArgsUtils
 * with slightly different behavior — the two copies should be consolidated.
 */
public class FlinkArgsUtils {

    /**
     * @param parameters job submission parameters
     * @return CLI arguments in the order expected by the Flink client:
     *         run [-m yarn-cluster] [-p n] [-Dkey=value...] [-c mainClass] [jar] [programArgs]
     */
    public static List<String> buildArgs(FlinkParameters parameters) {
        List<String> args = new ArrayList<>();

        args.add("run");

        // "cluster" deploy mode targets YARN; any other mode adds no -m flag.
        if ("cluster".equalsIgnoreCase(parameters.getDeployMode())) {
            args.add("-m");
            args.add("yarn-cluster");
        }

        if (parameters.getParallelism() > 0) {
            args.add("-p");
            args.add(String.valueOf(parameters.getParallelism()));
        }

        if (StringUtils.isNotEmpty(parameters.getJobName())) {
            args.add("-Dyarn.application.name=" + parameters.getJobName());
        }

        if (StringUtils.isNotEmpty(parameters.getYarnQueue())) {
            args.add("-Dyarn.application.queue=" + parameters.getYarnQueue());
        }

        // BUG FIX: the entry class must be passed as "-c <class>" BEFORE the jar
        // path. The previous code appended the bare class name AFTER the jar,
        // where the Flink CLI treats it as a program argument, not the entry class.
        if (StringUtils.isNotEmpty(parameters.getMainClass())) {
            args.add("-c");
            args.add(parameters.getMainClass());
        }

        if (StringUtils.isNotEmpty(parameters.getMainJar())) {
            args.add(parameters.getMainJar());
        }

        if (StringUtils.isNotEmpty(parameters.getMainArgs())) {
            args.add(parameters.getMainArgs());
        }

        return args;
    }
}
+ */ +package io.datavines.engine.flink.executor.utils; + +public class FlinkParameters { + private String mainJar; + private String mainClass; + private String deployMode; + private String mainArgs; + private int parallelism; + private String jobName; + private String yarnQueue; + + public String getMainJar() { + return mainJar; + } + + public void setMainJar(String mainJar) { + this.mainJar = mainJar; + } + + public String getMainClass() { + return mainClass; + } + + public void setMainClass(String mainClass) { + this.mainClass = mainClass; + } + + public String getDeployMode() { + return deployMode; + } + + public void setDeployMode(String deployMode) { + this.deployMode = deployMode; + } + + public String getMainArgs() { + return mainArgs; + } + + public void setMainArgs(String mainArgs) { + this.mainArgs = mainArgs; + } + + public int getParallelism() { + return parallelism; + } + + public void setParallelism(int parallelism) { + this.parallelism = parallelism; + } + + public String getJobName() { + return jobName; + } + + public void setJobName(String jobName) { + this.jobName = jobName; + } + + public String getYarnQueue() { + return yarnQueue; + } + + public void setYarnQueue(String yarnQueue) { + this.yarnQueue = yarnQueue; + } +} diff --git a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/resources/META-INF/plugins/io.datavines.engine.api.engine.EngineExecutor b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/resources/META-INF/plugins/io.datavines.engine.api.engine.EngineExecutor new file mode 100644 index 000000000..931d94a8b --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/datavines-engine-flink-executor/src/main/resources/META-INF/plugins/io.datavines.engine.api.engine.EngineExecutor @@ -0,0 +1 @@ +flink=io.datavines.engine.flink.executor.FlinkEngineExecutor diff --git 
a/datavines-engine/datavines-engine-plugins/datavines-engine-flink/pom.xml b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/pom.xml new file mode 100644 index 000000000..b9e104bb8 --- /dev/null +++ b/datavines-engine/datavines-engine-plugins/datavines-engine-flink/pom.xml @@ -0,0 +1,111 @@ +<?xml version="1.0" encoding="UTF-8"?> +<!-- + + Licensed to the Apache Software Foundation (ASF) under one or more + contributor license agreements. See the NOTICE file distributed with + this work for additional information regarding copyright ownership. + The ASF licenses this file to You under the Apache License, Version 2.0 + (the "License"); you may not use this file except in compliance with + the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +--> +<project xmlns="http://maven.apache.org/POM/4.0.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <parent> + <artifactId>datavines-engine-plugins</artifactId> + <groupId>io.datavines</groupId> + <version>1.0.0-SNAPSHOT</version> + </parent> + <modelVersion>4.0.0</modelVersion> + + <artifactId>datavines-engine-flink</artifactId> + <packaging>pom</packaging> + + <modules> + <module>datavines-engine-flink-api</module> + <module>datavines-engine-flink-core</module> + <module>datavines-engine-flink-executor</module> + </modules> + + <properties> + <flink.version>1.13.6</flink.version> + <scala.binary.version>2.11</scala.binary.version> + </properties> + + <repositories> + <repository> + <id>central</id> + <name>aliyun maven</name> + <url>https://maven.aliyun.com/repository/public</url> + <releases> + <enabled>true</enabled> + </releases> + <snapshots> + <enabled>true</enabled> + </snapshots> + </repository> + <repository> + <id>apache</id> + <name>apache maven</name> + <url>https://repository.apache.org/content/repositories/releases/</url> + <releases> + <enabled>true</enabled> + </releases> + <snapshots> + <enabled>false</enabled> + </snapshots> + </repository> + </repositories> + + <dependencies> + <dependency> + <groupId>io.datavines</groupId> + <artifactId>datavines-engine-api</artifactId> + <version>${project.version}</version> + </dependency> + + <dependency> + <groupId>org.apache.flink</groupId> + <artifactId>flink-java</artifactId> + <version>${flink.version}</version> + <scope>provided</scope> + </dependency> + + <dependency> + <groupId>org.apache.flink</groupId> + <artifactId>flink-streaming-java_${scala.binary.version}</artifactId> + <version>${flink.version}</version> + <scope>provided</scope> + </dependency> + + <dependency> + <groupId>org.apache.flink</groupId> + <artifactId>flink-clients_${scala.binary.version}</artifactId> + 
<version>${flink.version}</version> + <scope>provided</scope> + </dependency> + + <dependency> + <groupId>org.apache.flink</groupId> + <artifactId>flink-table-api-java-bridge_${scala.binary.version}</artifactId> + <version>${flink.version}</version> + <scope>provided</scope> + </dependency> + + <dependency> + <groupId>org.apache.flink</groupId> + <artifactId>flink-table-planner_${scala.binary.version}</artifactId> + <version>${flink.version}</version> + <scope>provided</scope> + </dependency> + </dependencies> +</project> diff --git a/datavines-engine/datavines-engine-plugins/pom.xml b/datavines-engine/datavines-engine-plugins/pom.xml index d27ae2fa5..57112b5f0 100644 --- a/datavines-engine/datavines-engine-plugins/pom.xml +++ b/datavines-engine/datavines-engine-plugins/pom.xml @@ -1,21 +1,21 @@ <?xml version="1.0" encoding="UTF-8"?> <!-- - + Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at - + http://www.apache.org/licenses/LICENSE-2.0 - + Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
- + --> <project xmlns="http://maven.apache.org/POM/4.0.0" @@ -33,6 +33,7 @@ <modules> <module>datavines-engine-spark</module> + <module>datavines-engine-flink</module> <module>datavines-engine-local</module> <module>datavines-engine-livy</module> </modules> From 2d5116f737a2a66c8f5165987ba1130b0f78681d Mon Sep 17 00:00:00 2001 From: GSHF <18663587295@sohu.com> Date: Sat, 28 Dec 2024 21:46:43 +0800 Subject: [PATCH 3/5] Add all changes from backup - Added common module changes - Added engine config changes - Added metric module changes - Added registry module changes - Added server module changes - Added UI module changes - Added deployment configurations --- .../io/datavines/common/ConfigConstants.java | 11 ++ .../io/datavines/common/config/Config.java | 24 ++++ .../config/BaseJobConfigurationBuilder.java | 8 ++ .../io.datavines.metric.api.ExpectedValue | 7 +- .../io.datavines.metric.api.ExpectedValue | 7 +- .../metric/expected/plugin/Last30DayAvg.java | 69 ++++++++++ .../io.datavines.metric.api.ExpectedValue | 7 +- .../io.datavines.metric.api.ExpectedValue | 7 +- .../io.datavines.metric.api.ExpectedValue | 7 +- .../io.datavines.metric.api.ExpectedValue | 7 +- .../io.datavines.metric.api.ExpectedValue | 7 +- .../io.datavines.metric.api.ExpectedValue | 7 +- .../metric/expected/plugin/WeeklyAvg.java | 69 ++++++++++ .../io.datavines.metric.api.ExpectedValue | 7 +- .../metric/plugin/CustomAggregateSql.java | 116 ++++++++++++++++ .../datavines/registry/plugin/MysqlMutex.java | 16 +++ .../plugin/MysqlServerStateManager.java | 33 +++++ datavines-server/pom.xml | 61 +++++++++ .../server/api/config/WebMvcConfig.java | 12 ++ .../controller/JobExecutionController.java | 126 ++++++++++++++++++ .../JobQualityReportController.java | 86 ++++++++++++ .../api/controller/MetricController.java | 5 + .../bo/job/JobExecutionDashboardParam.java | 43 ++++++ .../api/dto/bo/job/JobExecutionPageParam.java | 59 ++++++++ .../job/JobQualityReportDashboardParam.java | 46 +++++++ 
.../src/main/resources/application.yaml | 14 ++ .../Editor/components/JobConfig/index.tsx | 75 +++++++++++ .../MetricModal/RunEvnironment/index.tsx | 7 + datavines-ui/Editor/locale/en_US.ts | 8 ++ datavines-ui/Editor/locale/zh_CN.ts | 6 + deploy/compose/docker-compose.yaml | 4 + deploy/docker/Dockerfile | 13 ++ 32 files changed, 965 insertions(+), 9 deletions(-) create mode 100644 datavines-ui/Editor/components/JobConfig/index.tsx diff --git a/datavines-common/src/main/java/io/datavines/common/ConfigConstants.java b/datavines-common/src/main/java/io/datavines/common/ConfigConstants.java index 07ffffa78..3ccee963e 100644 --- a/datavines-common/src/main/java/io/datavines/common/ConfigConstants.java +++ b/datavines-common/src/main/java/io/datavines/common/ConfigConstants.java @@ -107,7 +107,11 @@ public class ConfigConstants { public static final String OUTPUT_TABLE = "output_table"; public static final String TMP_TABLE = "tmp_table"; public static final String COLUMN_SEPARATOR = "column_separator"; +<<<<<<< HEAD + public static final String LINE_SEPARATOR = "line_separator"; +======= public static final String LINE_SEPERATOR = "line_separator"; +>>>>>>> upstream/dev public static final String DATA_DIR = "data_dir"; public static final String ENABLE_SPARK_HIVE_SUPPORT = "enable_spark_hive_support"; @@ -169,4 +173,11 @@ public class ConfigConstants { public static final String SAVE_MODE = "save_mode"; public static final String UPSERT = "Upsert"; +<<<<<<< HEAD + + public static final String FILE_NAME = "file_name"; + + public static final String FLINK = "flink"; +======= +>>>>>>> upstream/dev } diff --git a/datavines-common/src/main/java/io/datavines/common/config/Config.java b/datavines-common/src/main/java/io/datavines/common/config/Config.java index 2b794429e..7994c7c2a 100644 --- a/datavines-common/src/main/java/io/datavines/common/config/Config.java +++ b/datavines-common/src/main/java/io/datavines/common/config/Config.java @@ -43,6 +43,14 @@ public String 
getString(String key){ return String.valueOf(config.get(key)); } +<<<<<<< HEAD + public String getString(String key, String defaultValue) { + Object value = config.get(key); + return value != null ? String.valueOf(value) : defaultValue; + } + +======= +>>>>>>> upstream/dev public List<String> getStringList(String key){ return (List<String>)config.get(key); } @@ -51,6 +59,14 @@ public Integer getInt(String key){ return Integer.valueOf(String.valueOf(config.get(key))); } +<<<<<<< HEAD + public Integer getInt(String key, Integer defaultValue) { + Object value = config.get(key); + return value != null ? Integer.valueOf(String.valueOf(value)) : defaultValue; + } + +======= +>>>>>>> upstream/dev public Boolean getBoolean(String key){ return Boolean.valueOf(String.valueOf(config.get(key))); } @@ -63,6 +79,14 @@ public Long getLong(String key){ return Long.valueOf(String.valueOf(config.get(key))); } +<<<<<<< HEAD + public Long getLong(String key, Long defaultValue) { + Object value = config.get(key); + return value != null ? Long.valueOf(String.valueOf(value)) : defaultValue; + } + +======= +>>>>>>> upstream/dev public Boolean has(String key) { return config.get(key) != null; } diff --git a/datavines-engine/datavines-engine-config/src/main/java/io/datavines/engine/config/BaseJobConfigurationBuilder.java b/datavines-engine/datavines-engine-config/src/main/java/io/datavines/engine/config/BaseJobConfigurationBuilder.java index 38620164c..99912111a 100644 --- a/datavines-engine/datavines-engine-config/src/main/java/io/datavines/engine/config/BaseJobConfigurationBuilder.java +++ b/datavines-engine/datavines-engine-config/src/main/java/io/datavines/engine/config/BaseJobConfigurationBuilder.java @@ -70,13 +70,21 @@ public void init(Map<String, String> inputParameter, JobExecutionInfo jobExecuti this.inputParameter.put(COLUMN_SEPARATOR, errorDataParameterMap.get(CommonPropertyUtils.COLUMN_SEPARATOR) == null ? 
CommonPropertyUtils.COLUMN_SEPARATOR_DEFAULT : errorDataParameterMap.get(CommonPropertyUtils.COLUMN_SEPARATOR)); +<<<<<<< HEAD + this.inputParameter.put(LINE_SEPARATOR, +======= this.inputParameter.put(LINE_SEPERATOR, +>>>>>>> upstream/dev errorDataParameterMap.get(CommonPropertyUtils.LINE_SEPARATOR) == null ? CommonPropertyUtils.LINE_SEPARATOR_DEFAULT : errorDataParameterMap.get(CommonPropertyUtils.LINE_SEPARATOR)); } else { this.inputParameter.put(ERROR_DATA_DIR, CommonPropertyUtils.getString(CommonPropertyUtils.ERROR_DATA_DIR, CommonPropertyUtils.ERROR_DATA_DIR_DEFAULT)); this.inputParameter.put(COLUMN_SEPARATOR, CommonPropertyUtils.getString(CommonPropertyUtils.COLUMN_SEPARATOR, CommonPropertyUtils.COLUMN_SEPARATOR_DEFAULT)); +<<<<<<< HEAD + this.inputParameter.put(LINE_SEPARATOR, CommonPropertyUtils.getString(CommonPropertyUtils.LINE_SEPARATOR, CommonPropertyUtils.LINE_SEPARATOR_DEFAULT)); +======= this.inputParameter.put(LINE_SEPERATOR, CommonPropertyUtils.getString(CommonPropertyUtils.LINE_SEPARATOR, CommonPropertyUtils.LINE_SEPARATOR_DEFAULT)); +>>>>>>> upstream/dev } if (FILE.equalsIgnoreCase(jobExecutionInfo.getValidateResultDataStorageType())) { diff --git a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-daily-avg/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-daily-avg/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue index af948d983..709160dbd 100644 --- a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-daily-avg/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue +++ b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-daily-avg/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue @@ -1,3 +1,8 @@ local_daily_avg=io.datavines.metric.expected.plugin.DailyAvg 
spark_daily_avg=io.datavines.metric.expected.plugin.SparkDailyAvg -livy_daily_avg=io.datavines.metric.expected.plugin.SparkDailyAvg \ No newline at end of file +<<<<<<< HEAD +livy_daily_avg=io.datavines.metric.expected.plugin.SparkDailyAvg +flink_daily_avg=io.datavines.metric.expected.plugin.DailyAvg +======= +livy_daily_avg=io.datavines.metric.expected.plugin.SparkDailyAvg +>>>>>>> upstream/dev diff --git a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-fix/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-fix/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue index b53917884..c0dda2436 100644 --- a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-fix/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue +++ b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-fix/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue @@ -1,3 +1,8 @@ local_fix_value=io.datavines.metric.expected.plugin.FixValue spark_fix_value=io.datavines.metric.expected.plugin.FixValue -livy_fix_value=io.datavines.metric.expected.plugin.FixValue \ No newline at end of file +<<<<<<< HEAD +livy_fix_value=io.datavines.metric.expected.plugin.FixValue +flink_fix_value=io.datavines.metric.expected.plugin.FixValue +======= +livy_fix_value=io.datavines.metric.expected.plugin.FixValue +>>>>>>> upstream/dev diff --git a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-last30day-avg/src/main/java/io/datavines/metric/expected/plugin/Last30DayAvg.java b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-last30day-avg/src/main/java/io/datavines/metric/expected/plugin/Last30DayAvg.java index 33ad121bd..9796d2b92 100644 --- 
a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-last30day-avg/src/main/java/io/datavines/metric/expected/plugin/Last30DayAvg.java +++ b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-last30day-avg/src/main/java/io/datavines/metric/expected/plugin/Last30DayAvg.java @@ -1,3 +1,71 @@ +<<<<<<< HEAD +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.datavines.metric.expected.plugin; + +import io.datavines.metric.api.ExpectedValue; + +import java.util.Map; + +import static io.datavines.common.ConfigConstants.METRIC_UNIQUE_KEY; + +public class Last30DayAvg implements ExpectedValue { + + @Override + public String getName() { + return "last_30d_avg"; + } + + @Override + public String getZhName() { + return "最近30天均值"; + } + + @Override + public String getKey(Map<String,String> inputParameter) { + String uniqueKey = inputParameter.get(METRIC_UNIQUE_KEY); + return "expected_value_" + uniqueKey; + } + + @Override + public String getExecuteSql(Map<String,String> inputParameter) { + String uniqueKey = inputParameter.get(METRIC_UNIQUE_KEY); + return "select round(avg(actual_value),2) as expected_value_" + uniqueKey + + " from dv_actual_values where data_time >= date_sub(date_format(${data_time},'%Y-%m-%d'),interval 30 DAY)" + + " and data_time < date_add(date_format(${data_time},'%Y-%m-%d'),interval 1 DAY) and unique_code = ${unique_code}"; + } + + @Override + public String getOutputTable(Map<String,String> inputParameter) { + String uniqueKey = inputParameter.get(METRIC_UNIQUE_KEY); + return "last_30d_" + uniqueKey; + } + + @Override + public boolean isNeedDefaultDatasource() { + return true; + } + + @Override + public void prepare(Map<String, String> config) { + + } +} +======= /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. 
See the NOTICE file distributed with @@ -64,3 +132,4 @@ public void prepare(Map<String, String> config) { } } +>>>>>>> upstream/dev diff --git a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-last30day-avg/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-last30day-avg/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue index 0031556b6..35dfd6ba3 100644 --- a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-last30day-avg/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue +++ b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-last30day-avg/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue @@ -1,3 +1,8 @@ local_last_30d_avg=io.datavines.metric.expected.plugin.Last30DayAvg spark_last_30d_avg=io.datavines.metric.expected.plugin.SparkLast30DayAvg -livy_last_30d_avg=io.datavines.metric.expected.plugin.SparkLast30DayAvg \ No newline at end of file +<<<<<<< HEAD +livy_last_30d_avg=io.datavines.metric.expected.plugin.SparkLast30DayAvg +flink_last_30d_avg=io.datavines.metric.expected.plugin.Last30DayAvg +======= +livy_last_30d_avg=io.datavines.metric.expected.plugin.SparkLast30DayAvg +>>>>>>> upstream/dev diff --git a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-last7day-avg/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-last7day-avg/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue index 391573b28..f054cd7eb 100644 --- a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-last7day-avg/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue +++ 
b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-last7day-avg/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue @@ -1,3 +1,8 @@ local_last_7d_avg=io.datavines.metric.expected.plugin.Last7DayAvg spark_last_7d_avg=io.datavines.metric.expected.plugin.SparkLast7DayAvg -livy_last_7d_avg=io.datavines.metric.expected.plugin.SparkLast7DayAvg \ No newline at end of file +<<<<<<< HEAD +livy_last_7d_avg=io.datavines.metric.expected.plugin.SparkLast7DayAvg +flink_last_7d_avg=io.datavines.metric.expected.plugin.Last7DayAvg +======= +livy_last_7d_avg=io.datavines.metric.expected.plugin.SparkLast7DayAvg +>>>>>>> upstream/dev diff --git a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-monthly-avg/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-monthly-avg/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue index c969402c4..6fd7228da 100644 --- a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-monthly-avg/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue +++ b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-monthly-avg/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue @@ -1,3 +1,8 @@ local_monthly_avg=io.datavines.metric.expected.plugin.MonthlyAvg spark_monthly_avg=io.datavines.metric.expected.plugin.SparkMonthlyAvg -livy_monthly_avg=io.datavines.metric.expected.plugin.SparkMonthlyAvg \ No newline at end of file +<<<<<<< HEAD +livy_monthly_avg=io.datavines.metric.expected.plugin.SparkMonthlyAvg +flink_monthly_avg=io.datavines.metric.expected.plugin.MonthlyAvg +======= +livy_monthly_avg=io.datavines.metric.expected.plugin.SparkMonthlyAvg +>>>>>>> upstream/dev diff --git 
a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-none/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-none/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue index c23fce078..e02f2ed4d 100644 --- a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-none/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue +++ b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-none/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue @@ -1,3 +1,8 @@ local_none=io.datavines.metric.expected.plugin.None spark_none=io.datavines.metric.expected.plugin.SparkNone -livy_none=io.datavines.metric.expected.plugin.SparkNone \ No newline at end of file +<<<<<<< HEAD +livy_none=io.datavines.metric.expected.plugin.SparkNone +flink_none=io.datavines.metric.expected.plugin.None +======= +livy_none=io.datavines.metric.expected.plugin.SparkNone +>>>>>>> upstream/dev diff --git a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-table-rows/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-table-rows/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue index e0db566b2..675899aae 100644 --- a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-table-rows/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue +++ b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-table-rows/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue @@ -1,3 +1,8 @@ local_table_total_rows=io.datavines.metric.expected.plugin.TableTotalRows spark_table_total_rows=io.datavines.metric.expected.plugin.TableTotalRows 
-livy_table_total_rows=io.datavines.metric.expected.plugin.TableTotalRows \ No newline at end of file +<<<<<<< HEAD +livy_table_total_rows=io.datavines.metric.expected.plugin.TableTotalRows +flink_table_total_rows=io.datavines.metric.expected.plugin.TableTotalRows +======= +livy_table_total_rows=io.datavines.metric.expected.plugin.TableTotalRows +>>>>>>> upstream/dev diff --git a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-target-table-rows/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-target-table-rows/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue index f93c9dcc6..87e388375 100644 --- a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-target-table-rows/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue +++ b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-target-table-rows/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue @@ -1,3 +1,8 @@ local_target_table_total_rows=io.datavines.metric.expected.plugin.TargetTableTotalRows spark_target_table_total_rows=io.datavines.metric.expected.plugin.TargetTableTotalRows -livy_target_table_total_rows=io.datavines.metric.expected.plugin.TargetTableTotalRows \ No newline at end of file +<<<<<<< HEAD +livy_target_table_total_rows=io.datavines.metric.expected.plugin.TargetTableTotalRows +flink_target_table_total_rows=io.datavines.metric.expected.plugin.TargetTableTotalRows +======= +livy_target_table_total_rows=io.datavines.metric.expected.plugin.TargetTableTotalRows +>>>>>>> upstream/dev diff --git a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-weekly-avg/src/main/java/io/datavines/metric/expected/plugin/WeeklyAvg.java 
b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-weekly-avg/src/main/java/io/datavines/metric/expected/plugin/WeeklyAvg.java index 08843d370..0212570cd 100644 --- a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-weekly-avg/src/main/java/io/datavines/metric/expected/plugin/WeeklyAvg.java +++ b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-weekly-avg/src/main/java/io/datavines/metric/expected/plugin/WeeklyAvg.java @@ -1,3 +1,71 @@ +<<<<<<< HEAD +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.datavines.metric.expected.plugin; + +import io.datavines.metric.api.ExpectedValue; + +import java.util.Map; + +import static io.datavines.common.ConfigConstants.METRIC_UNIQUE_KEY; + +public class WeeklyAvg implements ExpectedValue { + + @Override + public String getName() { + return "weekly_avg"; + } + + @Override + public String getKey(Map<String,String> inputParameter) { + String uniqueKey = inputParameter.get(METRIC_UNIQUE_KEY); + return "expected_value_" + uniqueKey; + } + + @Override + public String getZhName() { + return "周均值"; + } + + @Override + public String getExecuteSql(Map<String,String> inputParameter) { + String uniqueKey = inputParameter.get(METRIC_UNIQUE_KEY); + return "select round(avg(actual_value),2) as expected_value_" + uniqueKey + + " from dv_actual_values where data_time >= date_sub(${data_time},interval weekday(${data_time}) + 0 day)" + + " and data_time < date_add(date_format(${data_time},'%Y-%m-%d'),interval 1 DAY) and unique_code = ${unique_code}"; + } + + @Override + public String getOutputTable(Map<String,String> inputParameter) { + String uniqueKey = inputParameter.get(METRIC_UNIQUE_KEY); + return "weekly_range_" + uniqueKey; + } + + @Override + public boolean isNeedDefaultDatasource() { + return true; + } + + @Override + public void prepare(Map<String, String> config) { + + } +} +======= /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. 
See the NOTICE file distributed with @@ -64,3 +132,4 @@ public void prepare(Map<String, String> config) { } } +>>>>>>> upstream/dev diff --git a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-weekly-avg/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-weekly-avg/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue index d6784eaf2..c9c4dbb05 100644 --- a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-weekly-avg/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue +++ b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-weekly-avg/src/main/resources/META-INF/plugins/io.datavines.metric.api.ExpectedValue @@ -1,3 +1,8 @@ local_weekly_avg=io.datavines.metric.expected.plugin.WeeklyAvg spark_weekly_avg=io.datavines.metric.expected.plugin.SparkWeeklyAvg -livy_weekly_avg=io.datavines.metric.expected.plugin.SparkWeeklyAvg \ No newline at end of file +<<<<<<< HEAD +livy_weekly_avg=io.datavines.metric.expected.plugin.SparkWeeklyAvg +flink_weekly_avg=io.datavines.metric.expected.plugin.WeeklyAvg +======= +livy_weekly_avg=io.datavines.metric.expected.plugin.SparkWeeklyAvg +>>>>>>> upstream/dev diff --git a/datavines-metric/datavines-metric-plugins/datavines-metric-custom-aggregate-sql/src/main/java/io/datavines/metric/plugin/CustomAggregateSql.java b/datavines-metric/datavines-metric-plugins/datavines-metric-custom-aggregate-sql/src/main/java/io/datavines/metric/plugin/CustomAggregateSql.java index 809cb1644..f538b51eb 100644 --- a/datavines-metric/datavines-metric-plugins/datavines-metric-custom-aggregate-sql/src/main/java/io/datavines/metric/plugin/CustomAggregateSql.java +++ b/datavines-metric/datavines-metric-plugins/datavines-metric-custom-aggregate-sql/src/main/java/io/datavines/metric/plugin/CustomAggregateSql.java @@ -1,3 +1,118 @@ +<<<<<<< 
HEAD +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.datavines.metric.plugin; + +import java.util.*; + +import io.datavines.common.config.CheckResult; +import io.datavines.common.config.ConfigChecker; +import io.datavines.common.entity.ExecuteSql; +import io.datavines.common.enums.DataVinesDataType; +import io.datavines.common.utils.StringUtils; +import io.datavines.metric.api.ConfigItem; +import io.datavines.metric.api.MetricDimension; +import io.datavines.metric.api.MetricType; +import io.datavines.metric.api.SqlMetric; + +import static io.datavines.common.CommonConstants.TABLE; +import static io.datavines.common.ConfigConstants.*; +import static io.datavines.common.ConfigConstants.METRIC_UNIQUE_KEY; + +public class CustomAggregateSql implements SqlMetric { + + private final Set<String> requiredOptions = new HashSet<>(); + + private final HashMap<String,ConfigItem> configMap = new HashMap<>(); + + public CustomAggregateSql() { + configMap.put("table",new ConfigItem("table", "表名", "table")); + configMap.put("actual_aggregate_sql", new ConfigItem("actual_aggregate_sql","自定义聚合SQL","actual_aggregate_sql")); + configMap.put("filter",new ConfigItem("filter", "过滤条件", "filter")); + + 
requiredOptions.add("actual_aggregate_sql"); + requiredOptions.add("table"); + } + + @Override + public String getName() { + return "custom_aggregate_sql"; + } + + @Override + public String getZhName() { + return "自定义聚合SQL"; + } + + @Override + public MetricDimension getDimension() { + return MetricDimension.ACCURACY; + } + + @Override + public MetricType getType() { + return MetricType.SINGLE_TABLE; + } + + @Override + public boolean isInvalidateItemsCanOutput() { + return false; + } + + @Override + public CheckResult validateConfig(Map<String, Object> config) { + return ConfigChecker.checkConfig(config, requiredOptions); + } + + @Override + public void prepare(Map<String, String> config) { + + } + + @Override + public Map<String, ConfigItem> getConfigMap() { + return configMap; + } + + @Override + public ExecuteSql getInvalidateItems(Map<String,String> inputParameter) { + return null; + } + + @Override + public ExecuteSql getActualValue(Map<String,String> inputParameter) { + inputParameter.put(ACTUAL_TABLE, inputParameter.get(TABLE)); + String actualAggregateSql = inputParameter.get(ACTUAL_AGGREGATE_SQL); + if (StringUtils.isNotEmpty(actualAggregateSql)) { + if (actualAggregateSql.contains("as actual_value")) { + actualAggregateSql = actualAggregateSql.replace("as actual_value", "as actual_value_" + inputParameter.get(METRIC_UNIQUE_KEY)); + } else if (actualAggregateSql.contains("AS actual_value")) { + actualAggregateSql = actualAggregateSql.replace("AS actual_value", "as actual_value_" + inputParameter.get(METRIC_UNIQUE_KEY)); + } + } + return new ExecuteSql(actualAggregateSql, inputParameter.get(TABLE)); + } + + @Override + public List<DataVinesDataType> suitableType() { + return Collections.emptyList(); + } +} +======= /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. 
See the NOTICE file distributed with @@ -111,3 +226,4 @@ public List<DataVinesDataType> suitableType() { return Collections.emptyList(); } } +>>>>>>> upstream/dev diff --git a/datavines-registry/datavines-registry-plugins/datavines-registry-mysql/src/main/java/io/datavines/registry/plugin/MysqlMutex.java b/datavines-registry/datavines-registry-plugins/datavines-registry-mysql/src/main/java/io/datavines/registry/plugin/MysqlMutex.java index a6642f441..96cf6ef44 100644 --- a/datavines-registry/datavines-registry-plugins/datavines-registry-mysql/src/main/java/io/datavines/registry/plugin/MysqlMutex.java +++ b/datavines-registry/datavines-registry-plugins/datavines-registry-mysql/src/main/java/io/datavines/registry/plugin/MysqlMutex.java @@ -70,7 +70,11 @@ public boolean acquire(String lockKey, long time) { RegistryLock registryLock = null; int count = 1; if (time > 0) { +<<<<<<< HEAD + count = Math.max(1, (int) (time * 1000 / LOCK_ACQUIRE_INTERVAL)); +======= count = Math.max(1, (int) (time * 1000 / LOCK_ACQUIRE_INTERVAL)); +>>>>>>> upstream/dev } while (count > 0) { try { @@ -82,7 +86,11 @@ public boolean acquire(String lockKey, long time) { try { clearExpireLock(); } catch (SQLException ex) { +<<<<<<< HEAD + log.error("clear expire lock error : ", ex); +======= log.error("clear expire lock error : ", ex); +>>>>>>> upstream/dev } ThreadUtils.sleep(LOCK_ACQUIRE_INTERVAL); count--; @@ -151,7 +159,11 @@ private void executeDelete(String key) throws SQLException { private boolean isExists(String key, ServerInfo serverInfo) throws SQLException { checkConnection(); +<<<<<<< HEAD + PreparedStatement preparedStatement = connection.prepareStatement("select * from dv_registry_lock where lock_key=?"); +======= PreparedStatement preparedStatement = connection.prepareStatement("select * from dv_registry_lock where lock_key=?", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); +>>>>>>> upstream/dev preparedStatement.setString(1, key); ResultSet resultSet = 
preparedStatement.executeQuery(); @@ -172,7 +184,11 @@ private void clearExpireLock() throws SQLException { preparedStatement.executeUpdate(); preparedStatement.close(); // 将超时的lockKey移除掉 +<<<<<<< HEAD + lockHoldMap.values().removeIf((v -> v.getUpdateTime().getTime() < (System.currentTimeMillis()- expireTimeWindow))); +======= lockHoldMap.values().removeIf((v -> v.getUpdateTime().getTime() < (System.currentTimeMillis() - expireTimeWindow))); +>>>>>>> upstream/dev } private void checkConnection() throws SQLException { diff --git a/datavines-registry/datavines-registry-plugins/datavines-registry-mysql/src/main/java/io/datavines/registry/plugin/MysqlServerStateManager.java b/datavines-registry/datavines-registry-plugins/datavines-registry-mysql/src/main/java/io/datavines/registry/plugin/MysqlServerStateManager.java index 3908bc321..fc8b6de6a 100644 --- a/datavines-registry/datavines-registry-plugins/datavines-registry-mysql/src/main/java/io/datavines/registry/plugin/MysqlServerStateManager.java +++ b/datavines-registry/datavines-registry-plugins/datavines-registry-mysql/src/main/java/io/datavines/registry/plugin/MysqlServerStateManager.java @@ -47,10 +47,17 @@ public class MysqlServerStateManager { public MysqlServerStateManager(Connection connection, Properties properties) throws SQLException { this.connection = connection; this.properties = properties; +<<<<<<< HEAD + serverInfo = new ServerInfo(NetUtils.getHost(), Integer.valueOf((String) properties.get("server.port")), new Timestamp(System.currentTimeMillis()),new Timestamp(System.currentTimeMillis())); + ScheduledExecutorService executorService = Executors.newScheduledThreadPool(2); + executorService.scheduleAtFixedRate(new HeartBeater(),2,2, TimeUnit.SECONDS); + executorService.scheduleAtFixedRate(new ServerChecker(),5,10, TimeUnit.SECONDS); +======= serverInfo = new ServerInfo(NetUtils.getHost(), Integer.valueOf((String) properties.get("server.port")), new Timestamp(System.currentTimeMillis()), new 
Timestamp(System.currentTimeMillis())); ScheduledExecutorService executorService = Executors.newScheduledThreadPool(2); executorService.scheduleAtFixedRate(new HeartBeater(), 2, 2, TimeUnit.SECONDS); executorService.scheduleAtFixedRate(new ServerChecker(), 5, 10, TimeUnit.SECONDS); +>>>>>>> upstream/dev } public void registry(SubscribeListener subscribeListener) throws SQLException { @@ -71,14 +78,22 @@ public void unRegistry() throws SQLException { } public void refreshServer() throws SQLException { +<<<<<<< HEAD + ConcurrentHashMap<String,ServerInfo> newServers = fetchServers(); +======= ConcurrentHashMap<String, ServerInfo> newServers = fetchServers(); +>>>>>>> upstream/dev Set<String> offlineServer = new HashSet<>(); if (newServers == null) { //do nothing return; } Set<String> onlineServer = new HashSet<>(); +<<<<<<< HEAD + newServers.forEach((k, v) ->{ +======= newServers.forEach((k, v) -> { +>>>>>>> upstream/dev long updateTime = v.getUpdateTime().getTime(); long now = System.currentTimeMillis(); if (now - updateTime > 20000) { @@ -100,7 +115,11 @@ public void refreshServer() throws SQLException { if (!deadServers.contains(x) && !x.equals(serverInfo.getAddr())) { String[] values = x.split(":"); try { +<<<<<<< HEAD + executeDelete(new ServerInfo(values[0],Integer.valueOf(values[1]))); +======= executeDelete(new ServerInfo(values[0], Integer.valueOf(values[1]))); +>>>>>>> upstream/dev liveServerMap.remove(x); } catch (SQLException e) { log.error("delete server info error", e); @@ -131,7 +150,11 @@ public void refreshServer() throws SQLException { private void executeInsert(ServerInfo serverInfo) throws SQLException { checkConnection(); +<<<<<<< HEAD + PreparedStatement preparedStatement = connection.prepareStatement("insert into dv_server (host,port) values (?,?)"); +======= PreparedStatement preparedStatement = connection.prepareStatement("insert into dv_server (host, port) values (?, ?)"); +>>>>>>> upstream/dev preparedStatement.setString(1, 
serverInfo.getHost()); preparedStatement.setInt(2, serverInfo.getServerPort()); preparedStatement.executeUpdate(); @@ -159,7 +182,11 @@ private void executeDelete(ServerInfo serverInfo) throws SQLException { private boolean isExists(ServerInfo serverInfo) throws SQLException { checkConnection(); +<<<<<<< HEAD + PreparedStatement preparedStatement = connection.prepareStatement("select * from dv_server where host=? and port=?"); +======= PreparedStatement preparedStatement = connection.prepareStatement("select * from dv_server where host= ? and port= ?", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); +>>>>>>> upstream/dev preparedStatement.setString(1, serverInfo.getHost()); preparedStatement.setInt(2, serverInfo.getServerPort()); ResultSet resultSet = preparedStatement.executeQuery(); @@ -197,9 +224,15 @@ private ConcurrentHashMap<String, ServerInfo> fetchServers() throws SQLException return map; } +<<<<<<< HEAD + public List<ServerInfo> getActiveServerList(){ + List<ServerInfo> activeServerList = new ArrayList<>(); + liveServerMap.forEach((k,v)-> { +======= public List<ServerInfo> getActiveServerList() { List<ServerInfo> activeServerList = new ArrayList<>(); liveServerMap.forEach((k, v) -> { +>>>>>>> upstream/dev String[] values = k.split(":"); if (values.length == 2) { activeServerList.add(v); diff --git a/datavines-server/pom.xml b/datavines-server/pom.xml index 89d9a34b4..49055ca72 100644 --- a/datavines-server/pom.xml +++ b/datavines-server/pom.xml @@ -206,8 +206,13 @@ </dependency> <dependency> +<<<<<<< HEAD + <groupId>mysql</groupId> + <artifactId>mysql-connector-java</artifactId> +======= <groupId>com.mysql</groupId> <artifactId>mysql-connector-j</artifactId> +>>>>>>> upstream/dev </dependency> <dependency> @@ -301,7 +306,11 @@ <artifactId>datavines-engine-executor</artifactId> <version>${project.version}</version> </dependency> +<<<<<<< HEAD + +======= +>>>>>>> upstream/dev <dependency> <groupId>io.datavines</groupId> 
<artifactId>datavines-engine-spark-executor</artifactId> @@ -326,6 +335,25 @@ <version>${project.version}</version> <exclusions> <exclusion> +<<<<<<< HEAD + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-common</artifactId> + </exclusion> + <exclusion> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-client</artifactId> + </exclusion> + <exclusion> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-hdfs</artifactId> + </exclusion> + <exclusion> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-yarn-common</artifactId> + </exclusion> + <exclusion> +======= +>>>>>>> upstream/dev <artifactId>slf4j-log4j12</artifactId> <groupId>org.slf4j</groupId> </exclusion> @@ -338,6 +366,39 @@ <dependency> <groupId>io.datavines</groupId> +<<<<<<< HEAD + <artifactId>datavines-engine-flink-executor</artifactId> + <version>${project.version}</version> + <exclusions> + <exclusion> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-common</artifactId> + </exclusion> + <exclusion> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-client</artifactId> + </exclusion> + <exclusion> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-hdfs</artifactId> + </exclusion> + <exclusion> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-yarn-common</artifactId> + </exclusion> + </exclusions> + </dependency> + + <dependency> + <groupId>io.datavines</groupId> + <artifactId>datavines-engine-flink-core</artifactId> + <version>${project.version}</version> + </dependency> + + <dependency> + <groupId>io.datavines</groupId> +======= +>>>>>>> upstream/dev <artifactId>datavines-engine-local-config</artifactId> <version>${project.version}</version> </dependency> diff --git a/datavines-server/src/main/java/io/datavines/server/api/config/WebMvcConfig.java b/datavines-server/src/main/java/io/datavines/server/api/config/WebMvcConfig.java index d522c1734..6e304d057 100644 --- 
a/datavines-server/src/main/java/io/datavines/server/api/config/WebMvcConfig.java +++ b/datavines-server/src/main/java/io/datavines/server/api/config/WebMvcConfig.java @@ -49,13 +49,25 @@ public void addInterceptors(InterceptorRegistry registry) { @Override public void addResourceHandlers(ResourceHandlerRegistry registry) { +<<<<<<< HEAD + registry.addResourceHandler("swagger-ui.html") + .addResourceLocations("classpath:/META-INF/resources/"); + + registry.addResourceHandler("/webjars/**") + .addResourceLocations("classpath:/META-INF/resources/webjars/"); +======= +>>>>>>> upstream/dev registry.addResourceHandler("/**") .addResourceLocations("classpath:/META-INF/resources/") .addResourceLocations("classpath:/static/") .addResourceLocations("classpath:/static/templates") +<<<<<<< HEAD + .addResourceLocations("classpath:/public/"); +======= .addResourceLocations("classpath:/public/") ; +>>>>>>> upstream/dev } @Override diff --git a/datavines-server/src/main/java/io/datavines/server/api/controller/JobExecutionController.java b/datavines-server/src/main/java/io/datavines/server/api/controller/JobExecutionController.java index 79ad90002..24399d105 100644 --- a/datavines-server/src/main/java/io/datavines/server/api/controller/JobExecutionController.java +++ b/datavines-server/src/main/java/io/datavines/server/api/controller/JobExecutionController.java @@ -1,3 +1,128 @@ +<<<<<<< HEAD +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.datavines.server.api.controller; + +import io.datavines.core.aop.RefreshToken; +import io.datavines.core.constant.DataVinesConstants; +import io.datavines.core.exception.DataVinesServerException; +import io.datavines.common.entity.job.SubmitJob; +import io.datavines.server.api.dto.bo.job.JobExecutionDashboardParam; +import io.datavines.server.api.dto.bo.job.JobExecutionPageParam; +import io.datavines.server.api.dto.vo.JobExecutionResultVO; +import io.datavines.server.repository.entity.JobExecution; +import io.datavines.server.repository.service.JobExecutionErrorDataService; +import io.datavines.server.repository.service.JobExecutionResultService; +import io.datavines.server.repository.service.JobExecutionService; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.MediaType; +import org.springframework.web.bind.annotation.*; + +import javax.validation.Valid; + +@Slf4j +@Api(value = "job", tags = "job", produces = MediaType.APPLICATION_JSON_VALUE) +@RestController +@RequestMapping(value = DataVinesConstants.BASE_API_PATH + "/job/execution", produces = MediaType.APPLICATION_JSON_VALUE) +@RefreshToken +public class JobExecutionController { + + @Autowired + private JobExecutionService jobExecutionService; + + @Autowired + private JobExecutionResultService jobExecutionResultService; + + @Autowired + private JobExecutionErrorDataService jobExecutionErrorDataService; + + 
@ApiOperation(value = "submit external data quality job", response = Long.class) + @PostMapping(value = "/submit/data-quality", consumes = MediaType.APPLICATION_JSON_VALUE) + public Object submitDataQualityJob(@Valid @RequestBody SubmitJob submitJob) throws DataVinesServerException { + return jobExecutionService.submitJob(submitJob); + } + + @ApiOperation(value = "submit external data reconciliation job", response = Long.class) + @PostMapping(value = "/submit/data-reconciliation", consumes = MediaType.APPLICATION_JSON_VALUE) + public Object submitDataReconJob(@Valid @RequestBody SubmitJob submitJob) throws DataVinesServerException { + return jobExecutionService.submitJob(submitJob); + } + + @ApiOperation(value = "kill job", response = Long.class) + @DeleteMapping(value = "/kill/{executionId}") + public Object killTask(@PathVariable("executionId") Long executionId) { + return jobExecutionService.killJob(executionId); + } + + @ApiOperation(value = "get job execution status", response = String.class) + @GetMapping(value = "/status/{executionId}") + public Object getTaskStatus(@PathVariable("executionId") Long executionId) { + return jobExecutionService.getById(executionId).getStatus().getDescription(); + } + + @ApiOperation(value = "get job execution list by job id", response = JobExecution.class, responseContainer = "list") + @GetMapping(value = "/list/{jobId}") + public Object getJobExecutionListByJobId(@PathVariable("jobId") Long jobId) { + return jobExecutionService.listByJobId(jobId); + } + + @Deprecated + @ApiOperation(value = "get job execution result", response = JobExecutionResultVO.class) + @GetMapping(value = "/result/{executionId}") + public Object getJobExecutionResultInfo(@PathVariable("executionId") Long executionId) { + return jobExecutionResultService.getResultVOByJobExecutionId(executionId); + } + + @ApiOperation(value = "get job execution result", response = JobExecutionResultVO.class) + @GetMapping(value = "/list/result/{executionId}") + public 
Object getJobExecutionResultInfoList(@PathVariable("executionId") Long executionId) { + return jobExecutionResultService.getResultVOListByJobExecutionId(executionId); + } + + @ApiOperation(value = "get job execution page", response = JobExecutionResultVO.class, responseContainer = "page") + @PostMapping(value = "/page") + public Object page(@Valid @RequestBody JobExecutionPageParam jobExecutionPageParam) { + return jobExecutionService.getJobExecutionPage(jobExecutionPageParam); + } + + @ApiOperation(value = "get job execution error data page", response = Object.class, responseContainer = "page") + @GetMapping(value = "/errorDataPage") + public Object readErrorDataPage(@RequestParam("taskId") Long taskId, + @RequestParam("pageNumber") Integer pageNumber, + @RequestParam("pageSize") Integer pageSize){ + return jobExecutionErrorDataService.readErrorDataPage(taskId, pageNumber, pageSize); + } + + @ApiOperation(value = "get job execution agg pie", response = JobExecutionResultVO.class) + @PostMapping(value = "/agg-pie") + public Object getExecutionAggPie(@Valid @RequestBody JobExecutionDashboardParam dashboardParam) { + return jobExecutionService.getJobExecutionAggPie(dashboardParam); + } + + @ApiOperation(value = "get job execution trend bar", response = JobExecutionResultVO.class) + @PostMapping(value = "/trend-bar") + public Object getExecutionTrendBar(@Valid @RequestBody JobExecutionDashboardParam dashboardParam) { + return jobExecutionService.getJobExecutionTrendBar(dashboardParam); + } +} +======= /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. 
See the NOTICE file distributed with @@ -121,3 +246,4 @@ public Object getExecutionTrendBar(@Valid @RequestBody JobExecutionDashboardPara return jobExecutionService.getJobExecutionTrendBar(dashboardParam); } } +>>>>>>> upstream/dev diff --git a/datavines-server/src/main/java/io/datavines/server/api/controller/JobQualityReportController.java b/datavines-server/src/main/java/io/datavines/server/api/controller/JobQualityReportController.java index 24bff4b23..19560b95d 100644 --- a/datavines-server/src/main/java/io/datavines/server/api/controller/JobQualityReportController.java +++ b/datavines-server/src/main/java/io/datavines/server/api/controller/JobQualityReportController.java @@ -1,3 +1,88 @@ +<<<<<<< HEAD +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.datavines.server.api.controller; + +import io.datavines.common.entity.job.SubmitJob; +import io.datavines.core.aop.RefreshToken; +import io.datavines.core.constant.DataVinesConstants; +import io.datavines.core.exception.DataVinesServerException; +import io.datavines.server.api.dto.bo.job.JobExecutionDashboardParam; +import io.datavines.server.api.dto.bo.job.JobExecutionPageParam; +import io.datavines.server.api.dto.bo.job.JobQualityReportDashboardParam; +import io.datavines.server.api.dto.vo.JobExecutionResultVO; +import io.datavines.server.repository.entity.JobExecution; +import io.datavines.server.repository.service.JobExecutionErrorDataService; +import io.datavines.server.repository.service.JobExecutionResultService; +import io.datavines.server.repository.service.JobExecutionService; +import io.datavines.server.repository.service.JobQualityReportService; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.MediaType; +import org.springframework.web.bind.annotation.*; + +import javax.validation.Valid; + +@Slf4j +@Api(value = "job-quality-report", tags = "job-quality-report", produces = MediaType.APPLICATION_JSON_VALUE) +@RestController +@RequestMapping(value = DataVinesConstants.BASE_API_PATH + "/job/quality-report", produces = MediaType.APPLICATION_JSON_VALUE) +@RefreshToken +public class JobQualityReportController { + + @Autowired + private JobExecutionService jobExecutionService; + + @Autowired + private JobExecutionResultService jobExecutionResultService; + + @Autowired + private JobExecutionErrorDataService jobExecutionErrorDataService; + + @Autowired + private JobQualityReportService jobQualityReportService; + + @ApiOperation(value = "get job quality report page", response = JobExecutionResultVO.class, responseContainer = "page") + @PostMapping(value = "/page") + public Object 
page(@Valid @RequestBody JobQualityReportDashboardParam dashboardParam) { + return jobQualityReportService.getQualityReportPage(dashboardParam); + } + + @ApiOperation(value = "get job quality report page", response = JobExecutionResultVO.class, responseContainer = "list") + @GetMapping(value = "/listColumnExecution") + public Object listColumnExecution(@RequestParam Long reportId) { + return jobQualityReportService.listColumnExecution(reportId); + } + + @ApiOperation(value = "get job quality report score", response = JobExecutionResultVO.class) + @PostMapping(value = "/score") + public Object getScoreByCondition(@Valid @RequestBody JobQualityReportDashboardParam dashboardParam) { + return jobQualityReportService.getScoreByCondition(dashboardParam); + } + + @ApiOperation(value = "get job quality report score trend", response = JobExecutionResultVO.class) + @PostMapping(value = "/score-trend") + public Object getScoreTrendByCondition(@Valid @RequestBody JobQualityReportDashboardParam dashboardParam) { + return jobQualityReportService.getScoreTrendByCondition(dashboardParam); + } +} +======= /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. 
See the NOTICE file distributed with @@ -81,3 +166,4 @@ public Object getScoreTrendByCondition(@Valid @RequestBody JobQualityReportDashb return jobQualityReportService.getScoreTrendByCondition(dashboardParam); } } +>>>>>>> upstream/dev diff --git a/datavines-server/src/main/java/io/datavines/server/api/controller/MetricController.java b/datavines-server/src/main/java/io/datavines/server/api/controller/MetricController.java index 197bdf6c8..e85c1f527 100644 --- a/datavines-server/src/main/java/io/datavines/server/api/controller/MetricController.java +++ b/datavines-server/src/main/java/io/datavines/server/api/controller/MetricController.java @@ -158,7 +158,12 @@ public Object getExpectedTypeList(@PathVariable("type") String type) { afterFilterSet = expectedValueList.stream() .map(it ->it.replace("local_", "") .replace("spark_","") +<<<<<<< HEAD + .replace("livy_","") + .replace("flink_","")) +======= .replace("livy_","")) +>>>>>>> upstream/dev .collect(Collectors.toSet()); List<Item> items = new ArrayList<>(); diff --git a/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobExecutionDashboardParam.java b/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobExecutionDashboardParam.java index 2d582fc87..e63e21e1f 100644 --- a/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobExecutionDashboardParam.java +++ b/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobExecutionDashboardParam.java @@ -1,3 +1,45 @@ +<<<<<<< HEAD +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.datavines.server.api.dto.bo.job; + +import lombok.Data; + +import javax.validation.constraints.NotNull; + +@Data +@NotNull(message = "JobExecutionPageParam cannot be null") +public class JobExecutionDashboardParam { + + private Long datasourceId; + + private String metricType; + + private String schemaName; + + private String tableName; + + private String columnName; + + private String startTime; + + private String endTime; +} +======= /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -38,3 +80,4 @@ public class JobExecutionDashboardParam { private String endTime; } +>>>>>>> upstream/dev diff --git a/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobExecutionPageParam.java b/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobExecutionPageParam.java index ad051507b..ded66df17 100644 --- a/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobExecutionPageParam.java +++ b/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobExecutionPageParam.java @@ -1,3 +1,61 @@ +<<<<<<< HEAD +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.datavines.server.api.dto.bo.job; + +import lombok.Data; +import javax.validation.constraints.NotNull; + +@Data +@NotNull(message = "JobExecutionPageParam cannot be null") +public class JobExecutionPageParam { + + private Long datasourceId; + + private Integer status; + + private String searchVal; + + private Long jobId; + + private String metricType; + + private String schemaName; + + private String tableName; + + private String columnName; + + private String startTime; + + private String endTime; + + private Integer pageNumber; + + private Integer pageSize; + + private String schemaSearch; + + private String tableSearch; + + private String columnSearch; + +} +======= /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -54,3 +112,4 @@ public class JobExecutionPageParam { private String columnSearch; } +>>>>>>> upstream/dev diff --git a/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobQualityReportDashboardParam.java b/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobQualityReportDashboardParam.java index f5616205e..3609cab2b 100644 --- a/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobQualityReportDashboardParam.java +++ b/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobQualityReportDashboardParam.java @@ -1,3 +1,48 @@ +<<<<<<< HEAD +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.datavines.server.api.dto.bo.job; + +import lombok.Data; + +import javax.validation.constraints.NotNull; + +@Data +@NotNull(message = "JobQualityReportDashboardParam cannot be null") +public class JobQualityReportDashboardParam { + + @NotNull(message = "datasourceId can not be null") + private Long datasourceId; + + private String schemaName; + + private String tableName; + + private String startTime; + + private String endTime; + + private String reportDate; + + private int pageNumber; + + private int pageSize; +} +======= /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. 
See the NOTICE file distributed with @@ -41,3 +86,4 @@ public class JobQualityReportDashboardParam { private int pageSize; } +>>>>>>> upstream/dev diff --git a/datavines-server/src/main/resources/application.yaml b/datavines-server/src/main/resources/application.yaml index 383521ff5..9ef555ac3 100644 --- a/datavines-server/src/main/resources/application.yaml +++ b/datavines-server/src/main/resources/application.yaml @@ -90,6 +90,20 @@ spring: url: jdbc:mysql://127.0.0.1:3306/datavines?useUnicode=true&characterEncoding=UTF-8&useSSL=false&serverTimezone=Asia/Shanghai username: root password: 123456 +<<<<<<< HEAD + hikari: + connection-test-query: select 1 + minimum-idle: 5 + auto-commit: true + validation-timeout: 3000 + pool-name: datavines + maximum-pool-size: 50 + connection-timeout: 30000 + idle-timeout: 600000 + leak-detection-threshold: 0 + initialization-fail-timeout: 1 +======= +>>>>>>> upstream/dev quartz: properties: org.quartz.jobStore.driverDelegateClass: org.quartz.impl.jdbcjobstore.StdJDBCDelegate \ No newline at end of file diff --git a/datavines-ui/Editor/components/JobConfig/index.tsx b/datavines-ui/Editor/components/JobConfig/index.tsx new file mode 100644 index 000000000..f7d3a0544 --- /dev/null +++ b/datavines-ui/Editor/components/JobConfig/index.tsx @@ -0,0 +1,75 @@ +import React, { useState } from 'react'; +import { Form, Select } from 'antd'; +import { useIntl } from 'react-intl'; +import FlinkConfig from '../FlinkConfig'; + +const { Option } = Select; + +// 执行引擎选项 +const ENGINE_TYPES = [ + { label: 'Flink', value: 'flink_single_table' } +]; + +interface JobConfigProps { + onChange?: (config: any) => void; + initialValues?: any; +} + +const JobConfig: React.FC<JobConfigProps> = ({ onChange, initialValues = {} }) => { + const intl = useIntl(); + const [form] = Form.useForm(); + const [currentEngineType, setCurrentEngineType] = useState(initialValues.engineType || 'flink_single_table'); + + const handleEngineTypeChange = (type: string) => { + 
setCurrentEngineType(type); + onChange?.({ + ...initialValues, + engineType: type, + engineConfig: {} + }); + }; + + const handleConfigChange = (config: any) => { + onChange?.({ + ...initialValues, + engineType: currentEngineType, + engineConfig: config + }); + }; + + return ( + <div> + <Form + form={form} + layout="vertical" + initialValues={{ + engineType: currentEngineType + }} + > + <Form.Item + label={intl.formatMessage({ id: 'dv_metric_title_actuator_engine' })} + name="engineType" + rules={[{ required: true, message: intl.formatMessage({ id: 'common_required_tip' }) }]} + > + <Select onChange={handleEngineTypeChange}> + {ENGINE_TYPES.map(engine => ( + <Option key={engine.value} value={engine.value}> + {engine.label} + </Option> + ))} + </Select> + </Form.Item> + </Form> + + {currentEngineType === 'flink_single_table' && ( + <FlinkConfig + initialValues={initialValues?.engineConfig} + onChange={handleConfigChange} + engineType={currentEngineType} + /> + )} + </div> + ); +}; + +export default JobConfig; diff --git a/datavines-ui/Editor/components/MetricModal/RunEvnironment/index.tsx b/datavines-ui/Editor/components/MetricModal/RunEvnironment/index.tsx index 12890671c..f1b20a355 100644 --- a/datavines-ui/Editor/components/MetricModal/RunEvnironment/index.tsx +++ b/datavines-ui/Editor/components/MetricModal/RunEvnironment/index.tsx @@ -123,10 +123,17 @@ const Index = ({ form, id, detail }: InnerProps) => { <Form.Item noStyle dependencies={['engineType']}> {() => { const value = form.getFieldValue('engineType'); +<<<<<<< HEAD + if (value === 'spark' || value === 'livy' || value === 'flink') { + return render(); + } + return null; +======= if (value !== 'spark' && value !== 'livy') { return null; } return render(); +>>>>>>> upstream/dev }} </Form.Item> ); diff --git a/datavines-ui/Editor/locale/en_US.ts b/datavines-ui/Editor/locale/en_US.ts index 4e46d1ab6..60b01cd83 100644 --- a/datavines-ui/Editor/locale/en_US.ts +++ b/datavines-ui/Editor/locale/en_US.ts @@ 
-35,8 +35,16 @@ export default { dv_metric_actuator_executor_numbers: 'Number of executors', dv_metric_actuator_executor_memory: 'Number of executor memory', dv_metric_actuator_executor_cores: 'Number of executor cores', +<<<<<<< HEAD + dv_metric_actuator_executor_options: 'Options Parameters', + dv_metric_actuator_parallelism: 'Parallelism', + dv_metric_actuator_job_name: 'Job Name', + dv_metric_actuator_yarn_queue: 'Yarn Queue', + dv_metric_linux_user: 'Linux User', +======= dv_metric_actuator_executor_options: 'Options', dv_metric_linux_user: 'Linux user', +>>>>>>> upstream/dev dv_metric_create_time: 'Create time', dv_metric_update_time: 'Update time', dv_metric_name: 'Name', diff --git a/datavines-ui/Editor/locale/zh_CN.ts b/datavines-ui/Editor/locale/zh_CN.ts index 2477389ac..baf64bb86 100644 --- a/datavines-ui/Editor/locale/zh_CN.ts +++ b/datavines-ui/Editor/locale/zh_CN.ts @@ -36,6 +36,12 @@ export default { dv_metric_actuator_executor_memory: 'Executor内存数', dv_metric_actuator_executor_cores: 'Executor核心数', dv_metric_actuator_executor_options: '选项参数', +<<<<<<< HEAD + dv_metric_actuator_parallelism: '并行度', + dv_metric_actuator_job_name: '作业名称', + dv_metric_actuator_yarn_queue: 'Yarn队列', +======= +>>>>>>> upstream/dev dv_metric_linux_user: 'Linux用户', dv_metric_create_time: '创建时间', dv_metric_update_time: '更新时间', diff --git a/deploy/compose/docker-compose.yaml b/deploy/compose/docker-compose.yaml index 8c3dfa795..733d8f03f 100644 --- a/deploy/compose/docker-compose.yaml +++ b/deploy/compose/docker-compose.yaml @@ -1,7 +1,11 @@ version: '3.8' services: datavines: +<<<<<<< HEAD + image: datavines:latest +======= image: datavines:dev +>>>>>>> upstream/dev container_name: datavines ports: - 5600:5600 diff --git a/deploy/docker/Dockerfile b/deploy/docker/Dockerfile index d1180b0ca..bddf3570d 100644 --- a/deploy/docker/Dockerfile +++ b/deploy/docker/Dockerfile @@ -1,3 +1,15 @@ +<<<<<<< HEAD +FROM openjdk:8 +LABEL "author"="735140144" +WORKDIR /datavines +COPY 
./datavines-1.0.0-SNAPSHOT-bin.tar.gz /datavines +RUN tar -zxvf datavines-1.0.0-SNAPSHOT-bin.tar.gz +RUN chmod +x datavines-1.0.0-SNAPSHOT-bin/bin/datavines-daemon.sh +ENV TZ=Asia/Shanghai +ENV LANG=zh_CN.UTF-8 +EXPOSE 5600 +CMD ["datavines-1.0.0-SNAPSHOT-bin/bin/datavines-daemon.sh","start_container",""] +======= # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with @@ -42,3 +54,4 @@ RUN chmod +x datavines/bin/datavines-daemon.sh && sed -i 's/\r//g' datavines/bin EXPOSE 5600 CMD ["/usr/bin/tini", "--", "datavines/bin/datavines-daemon.sh", "start_container", ""] +>>>>>>> upstream/dev From 33ee87697e25ca1460c40c7a7d022e3ba10e3cc6 Mon Sep 17 00:00:00 2001 From: GSHF <18663587295@sohu.com> Date: Sat, 28 Dec 2024 22:27:16 +0800 Subject: [PATCH 4/5] fix: fix build issues and syntax errors --- .../io/datavines/common/ConfigConstants.java | 9 +- .../io/datavines/common/config/Config.java | 9 -- .../config/BaseJobConfigurationBuilder.java | 8 -- .../metric/expected/plugin/Last30DayAvg.java | 69 ---------- .../metric/expected/plugin/WeeklyAvg.java | 69 ---------- .../metric/plugin/CustomAggregateSql.java | 116 ---------------- .../datavines/registry/plugin/MysqlMutex.java | 16 --- .../plugin/MysqlServerStateManager.java | 77 +++-------- datavines-server/pom.xml | 51 ++----- .../server/api/config/WebMvcConfig.java | 8 -- .../controller/JobExecutionController.java | 126 ------------------ .../JobQualityReportController.java | 86 ------------ .../api/controller/MetricController.java | 5 - .../bo/job/JobExecutionDashboardParam.java | 43 ------ .../api/dto/bo/job/JobExecutionPageParam.java | 59 -------- .../job/JobQualityReportDashboardParam.java | 46 ------- 16 files changed, 28 insertions(+), 769 deletions(-) diff --git a/datavines-common/src/main/java/io/datavines/common/ConfigConstants.java b/datavines-common/src/main/java/io/datavines/common/ConfigConstants.java index 
3ccee963e..02d479a18 100644 --- a/datavines-common/src/main/java/io/datavines/common/ConfigConstants.java +++ b/datavines-common/src/main/java/io/datavines/common/ConfigConstants.java @@ -107,11 +107,7 @@ public class ConfigConstants { public static final String OUTPUT_TABLE = "output_table"; public static final String TMP_TABLE = "tmp_table"; public static final String COLUMN_SEPARATOR = "column_separator"; -<<<<<<< HEAD public static final String LINE_SEPARATOR = "line_separator"; -======= - public static final String LINE_SEPERATOR = "line_separator"; ->>>>>>> upstream/dev public static final String DATA_DIR = "data_dir"; public static final String ENABLE_SPARK_HIVE_SUPPORT = "enable_spark_hive_support"; @@ -173,11 +169,8 @@ public class ConfigConstants { public static final String SAVE_MODE = "save_mode"; public static final String UPSERT = "Upsert"; -<<<<<<< HEAD public static final String FILE_NAME = "file_name"; - + public static final String FLINK = "flink"; -======= ->>>>>>> upstream/dev } diff --git a/datavines-common/src/main/java/io/datavines/common/config/Config.java b/datavines-common/src/main/java/io/datavines/common/config/Config.java index 7994c7c2a..feaca2878 100644 --- a/datavines-common/src/main/java/io/datavines/common/config/Config.java +++ b/datavines-common/src/main/java/io/datavines/common/config/Config.java @@ -43,14 +43,11 @@ public String getString(String key){ return String.valueOf(config.get(key)); } -<<<<<<< HEAD public String getString(String key, String defaultValue) { Object value = config.get(key); return value != null ? String.valueOf(value) : defaultValue; } -======= ->>>>>>> upstream/dev public List<String> getStringList(String key){ return (List<String>)config.get(key); } @@ -59,14 +56,11 @@ public Integer getInt(String key){ return Integer.valueOf(String.valueOf(config.get(key))); } -<<<<<<< HEAD public Integer getInt(String key, Integer defaultValue) { Object value = config.get(key); return value != null ? 
Integer.valueOf(String.valueOf(value)) : defaultValue; } -======= ->>>>>>> upstream/dev public Boolean getBoolean(String key){ return Boolean.valueOf(String.valueOf(config.get(key))); } @@ -79,14 +73,11 @@ public Long getLong(String key){ return Long.valueOf(String.valueOf(config.get(key))); } -<<<<<<< HEAD public Long getLong(String key, Long defaultValue) { Object value = config.get(key); return value != null ? Long.valueOf(String.valueOf(value)) : defaultValue; } -======= ->>>>>>> upstream/dev public Boolean has(String key) { return config.get(key) != null; } diff --git a/datavines-engine/datavines-engine-config/src/main/java/io/datavines/engine/config/BaseJobConfigurationBuilder.java b/datavines-engine/datavines-engine-config/src/main/java/io/datavines/engine/config/BaseJobConfigurationBuilder.java index 99912111a..8b0334428 100644 --- a/datavines-engine/datavines-engine-config/src/main/java/io/datavines/engine/config/BaseJobConfigurationBuilder.java +++ b/datavines-engine/datavines-engine-config/src/main/java/io/datavines/engine/config/BaseJobConfigurationBuilder.java @@ -70,21 +70,13 @@ public void init(Map<String, String> inputParameter, JobExecutionInfo jobExecuti this.inputParameter.put(COLUMN_SEPARATOR, errorDataParameterMap.get(CommonPropertyUtils.COLUMN_SEPARATOR) == null ? CommonPropertyUtils.COLUMN_SEPARATOR_DEFAULT : errorDataParameterMap.get(CommonPropertyUtils.COLUMN_SEPARATOR)); -<<<<<<< HEAD this.inputParameter.put(LINE_SEPARATOR, -======= - this.inputParameter.put(LINE_SEPERATOR, ->>>>>>> upstream/dev errorDataParameterMap.get(CommonPropertyUtils.LINE_SEPARATOR) == null ? 
CommonPropertyUtils.LINE_SEPARATOR_DEFAULT : errorDataParameterMap.get(CommonPropertyUtils.LINE_SEPARATOR)); } else { this.inputParameter.put(ERROR_DATA_DIR, CommonPropertyUtils.getString(CommonPropertyUtils.ERROR_DATA_DIR, CommonPropertyUtils.ERROR_DATA_DIR_DEFAULT)); this.inputParameter.put(COLUMN_SEPARATOR, CommonPropertyUtils.getString(CommonPropertyUtils.COLUMN_SEPARATOR, CommonPropertyUtils.COLUMN_SEPARATOR_DEFAULT)); -<<<<<<< HEAD this.inputParameter.put(LINE_SEPARATOR, CommonPropertyUtils.getString(CommonPropertyUtils.LINE_SEPARATOR, CommonPropertyUtils.LINE_SEPARATOR_DEFAULT)); -======= - this.inputParameter.put(LINE_SEPERATOR, CommonPropertyUtils.getString(CommonPropertyUtils.LINE_SEPARATOR, CommonPropertyUtils.LINE_SEPARATOR_DEFAULT)); ->>>>>>> upstream/dev } if (FILE.equalsIgnoreCase(jobExecutionInfo.getValidateResultDataStorageType())) { diff --git a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-last30day-avg/src/main/java/io/datavines/metric/expected/plugin/Last30DayAvg.java b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-last30day-avg/src/main/java/io/datavines/metric/expected/plugin/Last30DayAvg.java index 9796d2b92..9b07679a6 100644 --- a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-last30day-avg/src/main/java/io/datavines/metric/expected/plugin/Last30DayAvg.java +++ b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-last30day-avg/src/main/java/io/datavines/metric/expected/plugin/Last30DayAvg.java @@ -1,4 +1,3 @@ -<<<<<<< HEAD /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -65,71 +64,3 @@ public void prepare(Map<String, String> config) { } } -======= -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package io.datavines.metric.expected.plugin; - -import io.datavines.metric.api.ExpectedValue; - -import java.util.Map; - -import static io.datavines.common.ConfigConstants.METRIC_UNIQUE_KEY; - -public class Last30DayAvg implements ExpectedValue { - - @Override - public String getName() { - return "last_30d_avg"; - } - - @Override - public String getZhName() { - return "最近30天均值"; - } - - @Override - public String getKey(Map<String,String> inputParameter) { - String uniqueKey = inputParameter.get(METRIC_UNIQUE_KEY); - return "expected_value_" + uniqueKey; - } - - @Override - public String getExecuteSql(Map<String,String> inputParameter) { - String uniqueKey = inputParameter.get(METRIC_UNIQUE_KEY); - return "select round(avg(actual_value),2) as expected_value_" + uniqueKey + - " from dv_actual_values where data_time >= date_sub(date_format(${data_time},'%Y-%m-%d'),interval 30 DAY)" + - " and data_time < date_add(date_format(${data_time},'%Y-%m-%d'),interval 1 DAY) and unique_code = ${unique_code}"; - } - - @Override - public String getOutputTable(Map<String,String> inputParameter) { - String uniqueKey = inputParameter.get(METRIC_UNIQUE_KEY); - return "last_30d_" + uniqueKey; - } - - @Override - public boolean isNeedDefaultDatasource() { - return true; - } - - @Override - public 
void prepare(Map<String, String> config) { - - } -} ->>>>>>> upstream/dev diff --git a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-weekly-avg/src/main/java/io/datavines/metric/expected/plugin/WeeklyAvg.java b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-weekly-avg/src/main/java/io/datavines/metric/expected/plugin/WeeklyAvg.java index 0212570cd..9ab7b91d2 100644 --- a/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-weekly-avg/src/main/java/io/datavines/metric/expected/plugin/WeeklyAvg.java +++ b/datavines-metric/datavines-metric-expected-plugins/datavines-metric-expected-weekly-avg/src/main/java/io/datavines/metric/expected/plugin/WeeklyAvg.java @@ -1,4 +1,3 @@ -<<<<<<< HEAD /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -65,71 +64,3 @@ public void prepare(Map<String, String> config) { } } -======= -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package io.datavines.metric.expected.plugin; - -import io.datavines.metric.api.ExpectedValue; - -import java.util.Map; - -import static io.datavines.common.ConfigConstants.METRIC_UNIQUE_KEY; - -public class WeeklyAvg implements ExpectedValue { - - @Override - public String getName() { - return "weekly_avg"; - } - - @Override - public String getKey(Map<String,String> inputParameter) { - String uniqueKey = inputParameter.get(METRIC_UNIQUE_KEY); - return "expected_value_" + uniqueKey; - } - - @Override - public String getZhName() { - return "周均值"; - } - - @Override - public String getExecuteSql(Map<String,String> inputParameter) { - String uniqueKey = inputParameter.get(METRIC_UNIQUE_KEY); - return "select round(avg(actual_value),2) as expected_value_" + uniqueKey + - " from dv_actual_values where data_time >= date_sub(${data_time},interval weekday(${data_time}) + 0 day)" + - " and data_time < date_add(date_format(${data_time},'%Y-%m-%d'),interval 1 DAY) and unique_code = ${unique_code}"; - } - - @Override - public String getOutputTable(Map<String,String> inputParameter) { - String uniqueKey = inputParameter.get(METRIC_UNIQUE_KEY); - return "weekly_range_" + uniqueKey; - } - - @Override - public boolean isNeedDefaultDatasource() { - return true; - } - - @Override - public void prepare(Map<String, String> config) { - - } -} ->>>>>>> upstream/dev diff --git a/datavines-metric/datavines-metric-plugins/datavines-metric-custom-aggregate-sql/src/main/java/io/datavines/metric/plugin/CustomAggregateSql.java b/datavines-metric/datavines-metric-plugins/datavines-metric-custom-aggregate-sql/src/main/java/io/datavines/metric/plugin/CustomAggregateSql.java index f538b51eb..092e7c213 100644 --- a/datavines-metric/datavines-metric-plugins/datavines-metric-custom-aggregate-sql/src/main/java/io/datavines/metric/plugin/CustomAggregateSql.java +++ 
b/datavines-metric/datavines-metric-plugins/datavines-metric-custom-aggregate-sql/src/main/java/io/datavines/metric/plugin/CustomAggregateSql.java @@ -1,4 +1,3 @@ -<<<<<<< HEAD /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -112,118 +111,3 @@ public List<DataVinesDataType> suitableType() { return Collections.emptyList(); } } -======= -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package io.datavines.metric.plugin; - -import java.util.*; - -import io.datavines.common.config.CheckResult; -import io.datavines.common.config.ConfigChecker; -import io.datavines.common.entity.ExecuteSql; -import io.datavines.common.enums.DataVinesDataType; -import io.datavines.common.utils.StringUtils; -import io.datavines.metric.api.ConfigItem; -import io.datavines.metric.api.MetricDimension; -import io.datavines.metric.api.MetricType; -import io.datavines.metric.api.SqlMetric; - -import static io.datavines.common.CommonConstants.TABLE; -import static io.datavines.common.ConfigConstants.*; -import static io.datavines.common.ConfigConstants.METRIC_UNIQUE_KEY; - -public class CustomAggregateSql implements SqlMetric { - - private final Set<String> requiredOptions = new HashSet<>(); - - private final HashMap<String,ConfigItem> configMap = new HashMap<>(); - - public CustomAggregateSql() { - configMap.put("table",new ConfigItem("table", "表名", "table")); - configMap.put("actual_aggregate_sql", new ConfigItem("actual_aggregate_sql","自定义聚合SQL","actual_aggregate_sql")); - configMap.put("filter",new ConfigItem("filter", "过滤条件", "filter")); - - requiredOptions.add("actual_aggregate_sql"); - requiredOptions.add("table"); - } - - @Override - public String getName() { - return "custom_aggregate_sql"; - } - - @Override - public String getZhName() { - return "自定义聚合SQL"; - } - - @Override - public MetricDimension getDimension() { - return MetricDimension.ACCURACY; - } - - @Override - public MetricType getType() { - return MetricType.SINGLE_TABLE; - } - - @Override - public boolean isInvalidateItemsCanOutput() { - return false; - } - - @Override - public CheckResult validateConfig(Map<String, Object> config) { - return ConfigChecker.checkConfig(config, requiredOptions); - } - - @Override - public void prepare(Map<String, String> config) { - - } - - @Override - public Map<String, ConfigItem> getConfigMap() { - return configMap; - } - - @Override - public ExecuteSql 
getInvalidateItems(Map<String,String> inputParameter) { - return null; - } - - @Override - public ExecuteSql getActualValue(Map<String,String> inputParameter) { - inputParameter.put(ACTUAL_TABLE, inputParameter.get(TABLE)); - String actualAggregateSql = inputParameter.get(ACTUAL_AGGREGATE_SQL); - if (StringUtils.isNotEmpty(actualAggregateSql)) { - if (actualAggregateSql.contains("as actual_value")) { - actualAggregateSql = actualAggregateSql.replace("as actual_value", "as actual_value_" + inputParameter.get(METRIC_UNIQUE_KEY)); - } else if (actualAggregateSql.contains("AS actual_value")) { - actualAggregateSql = actualAggregateSql.replace("AS actual_value", "as actual_value_" + inputParameter.get(METRIC_UNIQUE_KEY)); - } - } - return new ExecuteSql(actualAggregateSql, inputParameter.get(TABLE)); - } - - @Override - public List<DataVinesDataType> suitableType() { - return Collections.emptyList(); - } -} ->>>>>>> upstream/dev diff --git a/datavines-registry/datavines-registry-plugins/datavines-registry-mysql/src/main/java/io/datavines/registry/plugin/MysqlMutex.java b/datavines-registry/datavines-registry-plugins/datavines-registry-mysql/src/main/java/io/datavines/registry/plugin/MysqlMutex.java index 96cf6ef44..a6642f441 100644 --- a/datavines-registry/datavines-registry-plugins/datavines-registry-mysql/src/main/java/io/datavines/registry/plugin/MysqlMutex.java +++ b/datavines-registry/datavines-registry-plugins/datavines-registry-mysql/src/main/java/io/datavines/registry/plugin/MysqlMutex.java @@ -70,11 +70,7 @@ public boolean acquire(String lockKey, long time) { RegistryLock registryLock = null; int count = 1; if (time > 0) { -<<<<<<< HEAD - count = Math.max(1, (int) (time * 1000 / LOCK_ACQUIRE_INTERVAL)); -======= count = Math.max(1, (int) (time * 1000 / LOCK_ACQUIRE_INTERVAL)); ->>>>>>> upstream/dev } while (count > 0) { try { @@ -86,11 +82,7 @@ public boolean acquire(String lockKey, long time) { try { clearExpireLock(); } catch (SQLException ex) { -<<<<<<< HEAD 
- log.error("clear expire lock error : ", ex); -======= log.error("clear expire lock error : ", ex); ->>>>>>> upstream/dev } ThreadUtils.sleep(LOCK_ACQUIRE_INTERVAL); count--; @@ -159,11 +151,7 @@ private void executeDelete(String key) throws SQLException { private boolean isExists(String key, ServerInfo serverInfo) throws SQLException { checkConnection(); -<<<<<<< HEAD - PreparedStatement preparedStatement = connection.prepareStatement("select * from dv_registry_lock where lock_key=?"); -======= PreparedStatement preparedStatement = connection.prepareStatement("select * from dv_registry_lock where lock_key=?", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); ->>>>>>> upstream/dev preparedStatement.setString(1, key); ResultSet resultSet = preparedStatement.executeQuery(); @@ -184,11 +172,7 @@ private void clearExpireLock() throws SQLException { preparedStatement.executeUpdate(); preparedStatement.close(); // 将超时的lockKey移除掉 -<<<<<<< HEAD - lockHoldMap.values().removeIf((v -> v.getUpdateTime().getTime() < (System.currentTimeMillis()- expireTimeWindow))); -======= lockHoldMap.values().removeIf((v -> v.getUpdateTime().getTime() < (System.currentTimeMillis() - expireTimeWindow))); ->>>>>>> upstream/dev } private void checkConnection() throws SQLException { diff --git a/datavines-registry/datavines-registry-plugins/datavines-registry-mysql/src/main/java/io/datavines/registry/plugin/MysqlServerStateManager.java b/datavines-registry/datavines-registry-plugins/datavines-registry-mysql/src/main/java/io/datavines/registry/plugin/MysqlServerStateManager.java index fc8b6de6a..1194441af 100644 --- a/datavines-registry/datavines-registry-plugins/datavines-registry-mysql/src/main/java/io/datavines/registry/plugin/MysqlServerStateManager.java +++ b/datavines-registry/datavines-registry-plugins/datavines-registry-mysql/src/main/java/io/datavines/registry/plugin/MysqlServerStateManager.java @@ -47,17 +47,10 @@ public class MysqlServerStateManager { public 
MysqlServerStateManager(Connection connection, Properties properties) throws SQLException { this.connection = connection; this.properties = properties; -<<<<<<< HEAD - serverInfo = new ServerInfo(NetUtils.getHost(), Integer.valueOf((String) properties.get("server.port")), new Timestamp(System.currentTimeMillis()),new Timestamp(System.currentTimeMillis())); - ScheduledExecutorService executorService = Executors.newScheduledThreadPool(2); - executorService.scheduleAtFixedRate(new HeartBeater(),2,2, TimeUnit.SECONDS); - executorService.scheduleAtFixedRate(new ServerChecker(),5,10, TimeUnit.SECONDS); -======= serverInfo = new ServerInfo(NetUtils.getHost(), Integer.valueOf((String) properties.get("server.port")), new Timestamp(System.currentTimeMillis()), new Timestamp(System.currentTimeMillis())); ScheduledExecutorService executorService = Executors.newScheduledThreadPool(2); executorService.scheduleAtFixedRate(new HeartBeater(), 2, 2, TimeUnit.SECONDS); executorService.scheduleAtFixedRate(new ServerChecker(), 5, 10, TimeUnit.SECONDS); ->>>>>>> upstream/dev } public void registry(SubscribeListener subscribeListener) throws SQLException { @@ -78,22 +71,14 @@ public void unRegistry() throws SQLException { } public void refreshServer() throws SQLException { -<<<<<<< HEAD - ConcurrentHashMap<String,ServerInfo> newServers = fetchServers(); -======= ConcurrentHashMap<String, ServerInfo> newServers = fetchServers(); ->>>>>>> upstream/dev Set<String> offlineServer = new HashSet<>(); if (newServers == null) { //do nothing return; } Set<String> onlineServer = new HashSet<>(); -<<<<<<< HEAD - newServers.forEach((k, v) ->{ -======= newServers.forEach((k, v) -> { ->>>>>>> upstream/dev long updateTime = v.getUpdateTime().getTime(); long now = System.currentTimeMillis(); if (now - updateTime > 20000) { @@ -115,11 +100,7 @@ public void refreshServer() throws SQLException { if (!deadServers.contains(x) && !x.equals(serverInfo.getAddr())) { String[] values = x.split(":"); try { -<<<<<<< 
HEAD - executeDelete(new ServerInfo(values[0],Integer.valueOf(values[1]))); -======= executeDelete(new ServerInfo(values[0], Integer.valueOf(values[1]))); ->>>>>>> upstream/dev liveServerMap.remove(x); } catch (SQLException e) { log.error("delete server info error", e); @@ -150,11 +131,7 @@ public void refreshServer() throws SQLException { private void executeInsert(ServerInfo serverInfo) throws SQLException { checkConnection(); -<<<<<<< HEAD - PreparedStatement preparedStatement = connection.prepareStatement("insert into dv_server (host,port) values (?,?)"); -======= PreparedStatement preparedStatement = connection.prepareStatement("insert into dv_server (host, port) values (?, ?)"); ->>>>>>> upstream/dev preparedStatement.setString(1, serverInfo.getHost()); preparedStatement.setInt(2, serverInfo.getServerPort()); preparedStatement.executeUpdate(); @@ -182,11 +159,7 @@ private void executeDelete(ServerInfo serverInfo) throws SQLException { private boolean isExists(ServerInfo serverInfo) throws SQLException { checkConnection(); -<<<<<<< HEAD - PreparedStatement preparedStatement = connection.prepareStatement("select * from dv_server where host=? and port=?"); -======= PreparedStatement preparedStatement = connection.prepareStatement("select * from dv_server where host= ? 
and port= ?", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); ->>>>>>> upstream/dev preparedStatement.setString(1, serverInfo.getHost()); preparedStatement.setInt(2, serverInfo.getServerPort()); ResultSet resultSet = preparedStatement.executeQuery(); @@ -203,41 +176,31 @@ private boolean isExists(ServerInfo serverInfo) throws SQLException { private ConcurrentHashMap<String, ServerInfo> fetchServers() throws SQLException { checkConnection(); + ConcurrentHashMap<String, ServerInfo> serverMap = new ConcurrentHashMap<>(); PreparedStatement preparedStatement = connection.prepareStatement("select * from dv_server"); ResultSet resultSet = preparedStatement.executeQuery(); - if (resultSet == null) { - preparedStatement.close(); - return null; - } - - ConcurrentHashMap<String, ServerInfo> map = new ConcurrentHashMap<>(); while (resultSet.next()) { String host = resultSet.getString("host"); - int port = resultSet.getInt("port"); - Timestamp updateTime = resultSet.getTimestamp("update_time"); + Integer port = resultSet.getInt("port"); Timestamp createTime = resultSet.getTimestamp("create_time"); - map.put(host + ":" + port, new ServerInfo(host, port, createTime, updateTime)); + Timestamp updateTime = resultSet.getTimestamp("update_time"); + ServerInfo serverInfo = new ServerInfo(host, port, createTime, updateTime); + serverMap.put(serverInfo.getAddr(), serverInfo); } + resultSet.close(); preparedStatement.close(); - return map; + return serverMap; } -<<<<<<< HEAD - public List<ServerInfo> getActiveServerList(){ - List<ServerInfo> activeServerList = new ArrayList<>(); - liveServerMap.forEach((k,v)-> { -======= public List<ServerInfo> getActiveServerList() { List<ServerInfo> activeServerList = new ArrayList<>(); liveServerMap.forEach((k, v) -> { ->>>>>>> upstream/dev String[] values = k.split(":"); if (values.length == 2) { activeServerList.add(v); } - }); return activeServerList; } @@ -246,32 +209,26 @@ class HeartBeater implements Runnable { @Override public 
void run() { - if (Stopper.isRunning()) { - try { - if (isExists(serverInfo)) { - executeUpdate(serverInfo); - } else { - executeInsert(serverInfo); - } - } catch (SQLException e) { - log.error("heartbeat error", e); + try { + if (Stopper.isRunning()) { + executeUpdate(serverInfo); } + } catch (SQLException e) { + log.error("heartbeat error", e); } } } - class ServerChecker implements Runnable { @Override public void run() { - - if (Stopper.isRunning()) { - try { + try { + if (Stopper.isRunning()) { refreshServer(); - } catch (SQLException e) { - log.error("server check error", e); } + } catch (SQLException e) { + log.error("server check error", e); } } } @@ -283,7 +240,7 @@ private void checkConnection() throws SQLException { } public void close() throws SQLException { - if (connection != null && !connection.isClosed()) { + if (connection != null) { connection.close(); } } diff --git a/datavines-server/pom.xml b/datavines-server/pom.xml index 49055ca72..90607d9c7 100644 --- a/datavines-server/pom.xml +++ b/datavines-server/pom.xml @@ -1,21 +1,21 @@ <?xml version="1.0" encoding="UTF-8"?> <!-- - + Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at - + http://www.apache.org/licenses/LICENSE-2.0 - + Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
- + --> <project xmlns="http://maven.apache.org/POM/4.0.0" @@ -206,13 +206,8 @@ </dependency> <dependency> -<<<<<<< HEAD - <groupId>mysql</groupId> - <artifactId>mysql-connector-java</artifactId> -======= <groupId>com.mysql</groupId> <artifactId>mysql-connector-j</artifactId> ->>>>>>> upstream/dev </dependency> <dependency> @@ -306,11 +301,7 @@ <artifactId>datavines-engine-executor</artifactId> <version>${project.version}</version> </dependency> -<<<<<<< HEAD - -======= ->>>>>>> upstream/dev <dependency> <groupId>io.datavines</groupId> <artifactId>datavines-engine-spark-executor</artifactId> @@ -335,25 +326,6 @@ <version>${project.version}</version> <exclusions> <exclusion> -<<<<<<< HEAD - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-common</artifactId> - </exclusion> - <exclusion> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-client</artifactId> - </exclusion> - <exclusion> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-hdfs</artifactId> - </exclusion> - <exclusion> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-yarn-common</artifactId> - </exclusion> - <exclusion> -======= ->>>>>>> upstream/dev <artifactId>slf4j-log4j12</artifactId> <groupId>org.slf4j</groupId> </exclusion> @@ -366,7 +338,12 @@ <dependency> <groupId>io.datavines</groupId> -<<<<<<< HEAD + <artifactId>datavines-engine-local-config</artifactId> + <version>${project.version}</version> + </dependency> + + <dependency> + <groupId>io.datavines</groupId> <artifactId>datavines-engine-flink-executor</artifactId> <version>${project.version}</version> <exclusions> @@ -395,14 +372,6 @@ <version>${project.version}</version> </dependency> - <dependency> - <groupId>io.datavines</groupId> -======= ->>>>>>> upstream/dev - <artifactId>datavines-engine-local-config</artifactId> - <version>${project.version}</version> - </dependency> - <dependency> <groupId>io.datavines</groupId> <artifactId>datavines-connector-all</artifactId> diff --git 
a/datavines-server/src/main/java/io/datavines/server/api/config/WebMvcConfig.java b/datavines-server/src/main/java/io/datavines/server/api/config/WebMvcConfig.java index 6e304d057..9c3a98e2e 100644 --- a/datavines-server/src/main/java/io/datavines/server/api/config/WebMvcConfig.java +++ b/datavines-server/src/main/java/io/datavines/server/api/config/WebMvcConfig.java @@ -49,25 +49,17 @@ public void addInterceptors(InterceptorRegistry registry) { @Override public void addResourceHandlers(ResourceHandlerRegistry registry) { -<<<<<<< HEAD registry.addResourceHandler("swagger-ui.html") .addResourceLocations("classpath:/META-INF/resources/"); registry.addResourceHandler("/webjars/**") .addResourceLocations("classpath:/META-INF/resources/webjars/"); -======= ->>>>>>> upstream/dev registry.addResourceHandler("/**") .addResourceLocations("classpath:/META-INF/resources/") .addResourceLocations("classpath:/static/") .addResourceLocations("classpath:/static/templates") -<<<<<<< HEAD .addResourceLocations("classpath:/public/"); -======= - .addResourceLocations("classpath:/public/") - ; ->>>>>>> upstream/dev } @Override diff --git a/datavines-server/src/main/java/io/datavines/server/api/controller/JobExecutionController.java b/datavines-server/src/main/java/io/datavines/server/api/controller/JobExecutionController.java index 24399d105..286d9aef2 100644 --- a/datavines-server/src/main/java/io/datavines/server/api/controller/JobExecutionController.java +++ b/datavines-server/src/main/java/io/datavines/server/api/controller/JobExecutionController.java @@ -1,4 +1,3 @@ -<<<<<<< HEAD /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. 
See the NOTICE file distributed with @@ -122,128 +121,3 @@ public Object getExecutionTrendBar(@Valid @RequestBody JobExecutionDashboardPara return jobExecutionService.getJobExecutionTrendBar(dashboardParam); } } -======= -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package io.datavines.server.api.controller; - -import io.datavines.core.aop.RefreshToken; -import io.datavines.core.constant.DataVinesConstants; -import io.datavines.core.exception.DataVinesServerException; -import io.datavines.common.entity.job.SubmitJob; -import io.datavines.server.api.dto.bo.job.JobExecutionDashboardParam; -import io.datavines.server.api.dto.bo.job.JobExecutionPageParam; -import io.datavines.server.api.dto.vo.JobExecutionResultVO; -import io.datavines.server.repository.entity.JobExecution; -import io.datavines.server.repository.service.JobExecutionErrorDataService; -import io.datavines.server.repository.service.JobExecutionResultService; -import io.datavines.server.repository.service.JobExecutionService; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiOperation; -import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.MediaType; -import org.springframework.web.bind.annotation.*; - -import javax.validation.Valid; - -@Slf4j -@Api(value = "job", tags = "job", produces = MediaType.APPLICATION_JSON_VALUE) -@RestController -@RequestMapping(value = DataVinesConstants.BASE_API_PATH + "/job/execution", produces = MediaType.APPLICATION_JSON_VALUE) -@RefreshToken -public class JobExecutionController { - - @Autowired - private JobExecutionService jobExecutionService; - - @Autowired - private JobExecutionResultService jobExecutionResultService; - - @Autowired - private JobExecutionErrorDataService jobExecutionErrorDataService; - - @ApiOperation(value = "submit external data quality job", response = Long.class) - @PostMapping(value = "/submit/data-quality", consumes = MediaType.APPLICATION_JSON_VALUE) - public Object submitDataQualityJob(@Valid @RequestBody SubmitJob submitJob) throws DataVinesServerException { - return jobExecutionService.submitJob(submitJob); - } - - @ApiOperation(value = "submit external data reconciliation job", response = Long.class) - 
@PostMapping(value = "/submit/data-reconciliation", consumes = MediaType.APPLICATION_JSON_VALUE) - public Object submitDataReconJob(@Valid @RequestBody SubmitJob submitJob) throws DataVinesServerException { - return jobExecutionService.submitJob(submitJob); - } - - @ApiOperation(value = "kill job", response = Long.class) - @DeleteMapping(value = "/kill/{executionId}") - public Object killTask(@PathVariable("executionId") Long executionId) { - return jobExecutionService.killJob(executionId); - } - - @ApiOperation(value = "get job execution status", response = String.class) - @GetMapping(value = "/status/{executionId}") - public Object getTaskStatus(@PathVariable("executionId") Long executionId) { - return jobExecutionService.getById(executionId).getStatus().getDescription(); - } - - @ApiOperation(value = "get job execution list by job id", response = JobExecution.class, responseContainer = "list") - @GetMapping(value = "/list/{jobId}") - public Object getJobExecutionListByJobId(@PathVariable("jobId") Long jobId) { - return jobExecutionService.listByJobId(jobId); - } - - @Deprecated - @ApiOperation(value = "get job execution result", response = JobExecutionResultVO.class) - @GetMapping(value = "/result/{executionId}") - public Object getJobExecutionResultInfo(@PathVariable("executionId") Long executionId) { - return jobExecutionResultService.getResultVOByJobExecutionId(executionId); - } - - @ApiOperation(value = "get job execution result", response = JobExecutionResultVO.class) - @GetMapping(value = "/list/result/{executionId}") - public Object getJobExecutionResultInfoList(@PathVariable("executionId") Long executionId) { - return jobExecutionResultService.getResultVOListByJobExecutionId(executionId); - } - - @ApiOperation(value = "get job execution page", response = JobExecutionResultVO.class, responseContainer = "page") - @PostMapping(value = "/page") - public Object page(@Valid @RequestBody JobExecutionPageParam jobExecutionPageParam) { - return 
jobExecutionService.getJobExecutionPage(jobExecutionPageParam); - } - - @ApiOperation(value = "get job execution error data page", response = Object.class, responseContainer = "page") - @GetMapping(value = "/errorDataPage") - public Object readErrorDataPage(@RequestParam("taskId") Long taskId, - @RequestParam("pageNumber") Integer pageNumber, - @RequestParam("pageSize") Integer pageSize){ - return jobExecutionErrorDataService.readErrorDataPage(taskId, pageNumber, pageSize); - } - - @ApiOperation(value = "get job execution agg pie", response = JobExecutionResultVO.class) - @PostMapping(value = "/agg-pie") - public Object getExecutionAggPie(@Valid @RequestBody JobExecutionDashboardParam dashboardParam) { - return jobExecutionService.getJobExecutionAggPie(dashboardParam); - } - - @ApiOperation(value = "get job execution trend bar", response = JobExecutionResultVO.class) - @PostMapping(value = "/trend-bar") - public Object getExecutionTrendBar(@Valid @RequestBody JobExecutionDashboardParam dashboardParam) { - return jobExecutionService.getJobExecutionTrendBar(dashboardParam); - } -} ->>>>>>> upstream/dev diff --git a/datavines-server/src/main/java/io/datavines/server/api/controller/JobQualityReportController.java b/datavines-server/src/main/java/io/datavines/server/api/controller/JobQualityReportController.java index 19560b95d..a484e26f4 100644 --- a/datavines-server/src/main/java/io/datavines/server/api/controller/JobQualityReportController.java +++ b/datavines-server/src/main/java/io/datavines/server/api/controller/JobQualityReportController.java @@ -1,4 +1,3 @@ -<<<<<<< HEAD /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. 
See the NOTICE file distributed with @@ -82,88 +81,3 @@ public Object getScoreTrendByCondition(@Valid @RequestBody JobQualityReportDashb return jobQualityReportService.getScoreTrendByCondition(dashboardParam); } } -======= -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package io.datavines.server.api.controller; - -import io.datavines.common.entity.job.SubmitJob; -import io.datavines.core.aop.RefreshToken; -import io.datavines.core.constant.DataVinesConstants; -import io.datavines.core.exception.DataVinesServerException; -import io.datavines.server.api.dto.bo.job.JobExecutionDashboardParam; -import io.datavines.server.api.dto.bo.job.JobExecutionPageParam; -import io.datavines.server.api.dto.bo.job.JobQualityReportDashboardParam; -import io.datavines.server.api.dto.vo.JobExecutionResultVO; -import io.datavines.server.repository.entity.JobExecution; -import io.datavines.server.repository.service.JobExecutionErrorDataService; -import io.datavines.server.repository.service.JobExecutionResultService; -import io.datavines.server.repository.service.JobExecutionService; -import io.datavines.server.repository.service.JobQualityReportService; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiOperation; -import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.MediaType; -import org.springframework.web.bind.annotation.*; - -import javax.validation.Valid; - -@Slf4j -@Api(value = "job-quality-report", tags = "job-quality-report", produces = MediaType.APPLICATION_JSON_VALUE) -@RestController -@RequestMapping(value = DataVinesConstants.BASE_API_PATH + "/job/quality-report", produces = MediaType.APPLICATION_JSON_VALUE) -@RefreshToken -public class JobQualityReportController { - - @Autowired - private JobExecutionService jobExecutionService; - - @Autowired - private JobExecutionResultService jobExecutionResultService; - - @Autowired - private JobExecutionErrorDataService jobExecutionErrorDataService; - - @Autowired - private JobQualityReportService jobQualityReportService; - - @ApiOperation(value = "get job quality report page", response = JobExecutionResultVO.class, responseContainer = "page") - @PostMapping(value = "/page") - public Object 
page(@Valid @RequestBody JobQualityReportDashboardParam dashboardParam) { - return jobQualityReportService.getQualityReportPage(dashboardParam); - } - - @ApiOperation(value = "get job quality report page", response = JobExecutionResultVO.class, responseContainer = "list") - @GetMapping(value = "/listColumnExecution") - public Object listColumnExecution(@RequestParam Long reportId) { - return jobQualityReportService.listColumnExecution(reportId); - } - - @ApiOperation(value = "get job quality report score", response = JobExecutionResultVO.class) - @PostMapping(value = "/score") - public Object getScoreByCondition(@Valid @RequestBody JobQualityReportDashboardParam dashboardParam) { - return jobQualityReportService.getScoreByCondition(dashboardParam); - } - - @ApiOperation(value = "get job quality report score trend", response = JobExecutionResultVO.class) - @PostMapping(value = "/score-trend") - public Object getScoreTrendByCondition(@Valid @RequestBody JobQualityReportDashboardParam dashboardParam) { - return jobQualityReportService.getScoreTrendByCondition(dashboardParam); - } -} ->>>>>>> upstream/dev diff --git a/datavines-server/src/main/java/io/datavines/server/api/controller/MetricController.java b/datavines-server/src/main/java/io/datavines/server/api/controller/MetricController.java index e85c1f527..808cf5139 100644 --- a/datavines-server/src/main/java/io/datavines/server/api/controller/MetricController.java +++ b/datavines-server/src/main/java/io/datavines/server/api/controller/MetricController.java @@ -31,7 +31,6 @@ import io.swagger.annotations.ApiOperation; import org.springframework.http.MediaType; import org.springframework.web.bind.annotation.*; - import javax.validation.constraints.NotNull; import java.util.*; import java.util.stream.Collectors; @@ -158,12 +157,8 @@ public Object getExpectedTypeList(@PathVariable("type") String type) { afterFilterSet = expectedValueList.stream() .map(it ->it.replace("local_", "") .replace("spark_","") -<<<<<<< HEAD 
.replace("livy_","") .replace("flink_","")) -======= - .replace("livy_","")) ->>>>>>> upstream/dev .collect(Collectors.toSet()); List<Item> items = new ArrayList<>(); diff --git a/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobExecutionDashboardParam.java b/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobExecutionDashboardParam.java index e63e21e1f..85d8057d9 100644 --- a/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobExecutionDashboardParam.java +++ b/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobExecutionDashboardParam.java @@ -1,4 +1,3 @@ -<<<<<<< HEAD /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -39,45 +38,3 @@ public class JobExecutionDashboardParam { private String endTime; } -======= -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package io.datavines.server.api.dto.bo.job; - -import lombok.Data; - -import javax.validation.constraints.NotNull; - -@Data -@NotNull(message = "JobExecutionPageParam cannot be null") -public class JobExecutionDashboardParam { - - private Long datasourceId; - - private String metricType; - - private String schemaName; - - private String tableName; - - private String columnName; - - private String startTime; - - private String endTime; -} ->>>>>>> upstream/dev diff --git a/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobExecutionPageParam.java b/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobExecutionPageParam.java index ded66df17..51013d2af 100644 --- a/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobExecutionPageParam.java +++ b/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobExecutionPageParam.java @@ -1,4 +1,3 @@ -<<<<<<< HEAD /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -55,61 +54,3 @@ public class JobExecutionPageParam { private String columnSearch; } -======= -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package io.datavines.server.api.dto.bo.job; - -import lombok.Data; -import javax.validation.constraints.NotNull; - -@Data -@NotNull(message = "JobExecutionPageParam cannot be null") -public class JobExecutionPageParam { - - private Long datasourceId; - - private Integer status; - - private String searchVal; - - private Long jobId; - - private String metricType; - - private String schemaName; - - private String tableName; - - private String columnName; - - private String startTime; - - private String endTime; - - private Integer pageNumber; - - private Integer pageSize; - - private String schemaSearch; - - private String tableSearch; - - private String columnSearch; - -} ->>>>>>> upstream/dev diff --git a/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobQualityReportDashboardParam.java b/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobQualityReportDashboardParam.java index 3609cab2b..c811f6921 100644 --- a/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobQualityReportDashboardParam.java +++ b/datavines-server/src/main/java/io/datavines/server/api/dto/bo/job/JobQualityReportDashboardParam.java @@ -1,4 +1,3 @@ -<<<<<<< HEAD /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -42,48 +41,3 @@ public class JobQualityReportDashboardParam { private int pageSize; } -======= -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package io.datavines.server.api.dto.bo.job; - -import lombok.Data; - -import javax.validation.constraints.NotNull; - -@Data -@NotNull(message = "JobQualityReportDashboardParam cannot be null") -public class JobQualityReportDashboardParam { - - @NotNull(message = "datasourceId can not be null") - private Long datasourceId; - - private String schemaName; - - private String tableName; - - private String startTime; - - private String endTime; - - private String reportDate; - - private int pageNumber; - - private int pageSize; -} ->>>>>>> upstream/dev From 64a1d0495db976c65f13df1b9c5f41d17e6688a9 Mon Sep 17 00:00:00 2001 From: GSHF <18663587295@sohu.com> Date: Sun, 29 Dec 2024 00:09:28 +0800 Subject: [PATCH 5/5] feat: update application config and UI components --- .../src/main/resources/application.yaml | 17 +++++++---------- .../MetricModal/ActuatorConfigure/index.tsx | 15 --------------- .../MetricModal/RunEvnironment/index.tsx | 7 ------- .../Editor/components/MetricModal/type.ts | 11 ----------- datavines-ui/Editor/locale/en_US.ts | 5 ----- datavines-ui/Editor/locale/zh_CN.ts | 3 --- datavines-ui/src/locale/en_US.ts | 8 +------- datavines-ui/src/locale/zh_CN.ts | 8 +------- 8 files changed, 9 insertions(+), 65 deletions(-) diff --git a/datavines-server/src/main/resources/application.yaml b/datavines-server/src/main/resources/application.yaml index 9ef555ac3..ff6aa7ee6 100644 --- a/datavines-server/src/main/resources/application.yaml +++ b/datavines-server/src/main/resources/application.yaml @@ -21,10 +21,10 @@ spring: application: name: 
datavines-server datasource: - driver-class-name: org.postgresql.Driver - url: jdbc:postgresql://127.0.0.1:5432/datavines - username: postgres - password: 123456 + driver-class-name: com.mysql.cj.jdbc.Driver + url: jdbc:mysql://127.0.0.1:3306/datavines?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai + username: root + password: Gdp7866488 hikari: connection-test-query: select 1 minimum-idle: 5 @@ -55,7 +55,7 @@ spring: org.quartz.jobStore.misfireThreshold: 60000 org.quartz.scheduler.batchTriggerAcquisitionMaxCount: 1 org.quartz.scheduler.makeSchedulerThreadDaemon: true - org.quartz.jobStore.driverDelegateClass: org.quartz.impl.jdbcjobstore.PostgreSQLDelegate + org.quartz.jobStore.driverDelegateClass: org.quartz.impl.jdbcjobstore.StdJDBCDelegate org.quartz.jobStore.clusterCheckinInterval: 5000 mvc: pathmatch: @@ -87,10 +87,9 @@ spring: on-profile: mysql datasource: driver-class-name: com.mysql.cj.jdbc.Driver - url: jdbc:mysql://127.0.0.1:3306/datavines?useUnicode=true&characterEncoding=UTF-8&useSSL=false&serverTimezone=Asia/Shanghai + url: jdbc:mysql://127.0.0.1:3306/datavines?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai username: root - password: 123456 -<<<<<<< HEAD + password: Gdp7866488 hikari: connection-test-query: select 1 minimum-idle: 5 @@ -102,8 +101,6 @@ spring: idle-timeout: 600000 leak-detection-threshold: 0 initialization-fail-timeout: 1 -======= ->>>>>>> upstream/dev quartz: properties: org.quartz.jobStore.driverDelegateClass: org.quartz.impl.jdbcjobstore.StdJDBCDelegate \ No newline at end of file diff --git a/datavines-ui/Editor/components/MetricModal/ActuatorConfigure/index.tsx b/datavines-ui/Editor/components/MetricModal/ActuatorConfigure/index.tsx index 4bb8a2012..2c6d3d668 100644 --- a/datavines-ui/Editor/components/MetricModal/ActuatorConfigure/index.tsx +++ b/datavines-ui/Editor/components/MetricModal/ActuatorConfigure/index.tsx @@ -35,7 +35,6 @@ const Index = ({ form, detail }: InnerProps) => { 
parameter = JSON.parse(engineParameter); } form.setFieldsValue({ -<<<<<<< HEAD deployMode: parameter.deployMode ?? 'local', taskManagerCount: parameter.taskManagerCount ?? 2, taskManagerMemory: parameter.taskManagerMemory ?? '2G', @@ -43,16 +42,12 @@ const Index = ({ form, detail }: InnerProps) => { parallelism: parameter.parallelism ?? 1, jobName: parameter.jobName ?? '', yarnQueue: parameter.yarnQueue ?? '', - others: parameter.others ?? '--conf flink.yarn.maxAppAttempts=1', -======= - deployMode: parameter.deployMode ?? 'cluster', driverCores: parameter.driverCores ?? 1, driverMemory: parameter.driverMemory ?? '512M', numExecutors: parameter.numExecutors ?? 2, executorMemory: parameter.executorMemory ?? '2G', executorCores: parameter.executorCores ?? 2, others: parameter.others ?? '--conf spark.yarn.maxAppAttempts=1', ->>>>>>> upstream/dev tenantCode: detail?.tenantCode ? detail.tenantCode.toString() : '', env: detail?.env ? detail.env.toString() : '', engineType: detail?.engineType ? 
detail.engineType.toString() : 'local', @@ -139,7 +134,6 @@ const Index = ({ form, detail }: InnerProps) => { </Form.Item> </> ); -<<<<<<< HEAD const renderFlink = () => ( <> <Form.Item @@ -229,8 +223,6 @@ const Index = ({ form, detail }: InnerProps) => { </Form.Item> </> ); -======= ->>>>>>> upstream/dev return ( <Title title={intl.formatMessage({ id: 'dv_metric_title_actuator_engine_config' })}> <Row gutter={30}> @@ -253,7 +245,6 @@ const Index = ({ form, detail }: InnerProps) => { <Form.Item noStyle dependencies={['engineType']}> {() => { const value = form.getFieldValue('engineType'); -<<<<<<< HEAD if (value === 'spark' || value === 'livy') { return renderSpark(); } @@ -261,12 +252,6 @@ const Index = ({ form, detail }: InnerProps) => { return renderFlink(); } return null; -======= - if (value !== 'spark' && value !== 'livy') { - return null; - } - return renderSpark(); ->>>>>>> upstream/dev }} </Form.Item> diff --git a/datavines-ui/Editor/components/MetricModal/RunEvnironment/index.tsx b/datavines-ui/Editor/components/MetricModal/RunEvnironment/index.tsx index f1b20a355..f7a2fccae 100644 --- a/datavines-ui/Editor/components/MetricModal/RunEvnironment/index.tsx +++ b/datavines-ui/Editor/components/MetricModal/RunEvnironment/index.tsx @@ -123,17 +123,10 @@ const Index = ({ form, id, detail }: InnerProps) => { <Form.Item noStyle dependencies={['engineType']}> {() => { const value = form.getFieldValue('engineType'); -<<<<<<< HEAD if (value === 'spark' || value === 'livy' || value === 'flink') { return render(); } return null; -======= - if (value !== 'spark' && value !== 'livy') { - return null; - } - return render(); ->>>>>>> upstream/dev }} </Form.Item> ); diff --git a/datavines-ui/Editor/components/MetricModal/type.ts b/datavines-ui/Editor/components/MetricModal/type.ts index 9b01d18a5..f94cff3bd 100644 --- a/datavines-ui/Editor/components/MetricModal/type.ts +++ b/datavines-ui/Editor/components/MetricModal/type.ts @@ -43,7 +43,6 @@ export type TParameterItem = 
{ } export type TEngineParameter = { -<<<<<<< HEAD programType: string, // JAVA deployMode: string, driverCores: number, @@ -61,16 +60,6 @@ export type TEngineParameter = { taskManagerCount?: number, taskManagerMemory?: string, jobManagerMemory?: string, -======= - programType:string, // JAVA - deployMode:string, - driverCores: number, - driverMemory: string, - numExecutors: number, - executorMemory:string, - executorCores: number, - others: string, ->>>>>>> upstream/dev } export type TDetail = null | { diff --git a/datavines-ui/Editor/locale/en_US.ts b/datavines-ui/Editor/locale/en_US.ts index 60b01cd83..944bc78b5 100644 --- a/datavines-ui/Editor/locale/en_US.ts +++ b/datavines-ui/Editor/locale/en_US.ts @@ -35,16 +35,11 @@ export default { dv_metric_actuator_executor_numbers: 'Number of executors', dv_metric_actuator_executor_memory: 'Number of executor memory', dv_metric_actuator_executor_cores: 'Number of executor cores', -<<<<<<< HEAD dv_metric_actuator_executor_options: 'Options Parameters', dv_metric_actuator_parallelism: 'Parallelism', dv_metric_actuator_job_name: 'Job Name', dv_metric_actuator_yarn_queue: 'Yarn Queue', dv_metric_linux_user: 'Linux User', -======= - dv_metric_actuator_executor_options: 'Options', - dv_metric_linux_user: 'Linux user', ->>>>>>> upstream/dev dv_metric_create_time: 'Create time', dv_metric_update_time: 'Update time', dv_metric_name: 'Name', diff --git a/datavines-ui/Editor/locale/zh_CN.ts b/datavines-ui/Editor/locale/zh_CN.ts index baf64bb86..51e257e05 100644 --- a/datavines-ui/Editor/locale/zh_CN.ts +++ b/datavines-ui/Editor/locale/zh_CN.ts @@ -36,12 +36,9 @@ export default { dv_metric_actuator_executor_memory: 'Executor内存数', dv_metric_actuator_executor_cores: 'Executor核心数', dv_metric_actuator_executor_options: '选项参数', -<<<<<<< HEAD dv_metric_actuator_parallelism: '并行度', dv_metric_actuator_job_name: '作业名称', dv_metric_actuator_yarn_queue: 'Yarn队列', -======= ->>>>>>> upstream/dev dv_metric_linux_user: 'Linux用户', 
dv_metric_create_time: '创建时间', dv_metric_update_time: '更新时间', diff --git a/datavines-ui/src/locale/en_US.ts b/datavines-ui/src/locale/en_US.ts index 51b462438..364225577 100644 --- a/datavines-ui/src/locale/en_US.ts +++ b/datavines-ui/src/locale/en_US.ts @@ -292,10 +292,7 @@ export default { job_log_refresh: 'Refresh', job_log_download: 'Download', job_log_fullScreen: 'FullScreen', -<<<<<<< HEAD dv_task_manager_count: 'Task Manager Count', -======= ->>>>>>> upstream/dev error_create_btn: 'Create error data store', error_table_store_name: 'Storage Name', @@ -333,7 +330,6 @@ export default { next_ten_cron_run_times: 'Next ten cron run times', view_future_execute_plan: 'view future execute plan', test_send: 'test send', -<<<<<<< HEAD dv_deploy_mode: 'Deploy Mode', dv_deploy_mode_required: 'Please select deploy mode', @@ -349,7 +345,5 @@ export default { dv_flink_deploy_mode_yarn_application: 'Yarn Application Mode', dv_deploy_mode_cluster: 'Cluster Mode', dv_deploy_mode_yarn: 'Yarn Mode', - dv_deploy_mode_local: 'Local Mode', -======= ->>>>>>> upstream/dev + dv_deploy_mode_local: 'Local Mode' }; diff --git a/datavines-ui/src/locale/zh_CN.ts b/datavines-ui/src/locale/zh_CN.ts index 1a3da7e81..0ace2b7de 100644 --- a/datavines-ui/src/locale/zh_CN.ts +++ b/datavines-ui/src/locale/zh_CN.ts @@ -297,12 +297,9 @@ export default { error_title: '存储管理', user_title: '用户管理', -<<<<<<< HEAD dv_task_manager_count: 'Task Manager数量', -======= ->>>>>>> upstream/dev label_title: '标签分类', label_list: '标签列表', label_add_category: '新增标签分类', @@ -333,7 +330,6 @@ export default { next_ten_cron_run_times: '未来十次执行时间', view_future_execute_plan: '查看未来执行计划', test_send: '测试发送', -<<<<<<< HEAD dv_deploy_mode: '部署模式', dv_deploy_mode_required: '请选择部署模式', @@ -349,7 +345,5 @@ export default { dv_flink_deploy_mode_yarn_application: 'Yarn Application模式', dv_deploy_mode_cluster: '集群模式', dv_deploy_mode_yarn: 'Yarn模式', - dv_deploy_mode_local: '本地模式', -======= ->>>>>>> upstream/dev + dv_deploy_mode_local: '本地模式' 
};