[Feature][Engine] Data profile support spark/livy mode #260

Merged · 3 commits · Nov 3, 2023
SparkEngineParameter.java (new file)
@@ -0,0 +1,68 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.datavines.common.entity;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.RequiredArgsConstructor;

@Data
@AllArgsConstructor
@RequiredArgsConstructor
public class SparkEngineParameter {

    /**
     * program type
     */
    private String programType;

    /**
     * deploy mode
     */
    private String deployMode;

    /**
     * driver-cores: number of cores used by the driver (cluster mode only)
     */
    private int driverCores;

    /**
     * driver-memory: memory for the driver
     */
    private String driverMemory;

    /**
     * num-executors: number of executors to launch
     */
    private int numExecutors;

    /**
     * executor-cores: number of cores per executor
     */
    private int executorCores;

    /**
     * executor-memory: memory per executor
     */
    private String executorMemory;

    /**
     * other arguments
     */
    private String others;

}
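
A minimal usage sketch of the new parameter class (the values mirror the defaults read later in this PR via CommonPropertyUtils; JSONUtils.toJsonString is the datavines utility the PR already uses):

SparkEngineParameter parameter = new SparkEngineParameter(
        "JAVA",     // programType
        "cluster",  // deployMode
        1,          // driverCores
        "512M",     // driverMemory
        1,          // numExecutors
        1,          // executorCores
        "512M",     // executorMemory
        null);      // others: no extra arguments
String json = JSONUtils.toJsonString(parameter);
// e.g. {"programType":"JAVA","deployMode":"cluster","driverCores":1,...}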
SparkDataProfileMetricBuilder.java (new file)
@@ -0,0 +1,55 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.datavines.engine.spark.config;

import io.datavines.common.config.SinkConfig;
import io.datavines.common.config.enums.SinkType;
import io.datavines.common.entity.job.BaseJobParameter;
import io.datavines.common.exception.DataVinesException;
import org.apache.commons.collections4.CollectionUtils;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import static io.datavines.common.ConfigConstants.METRIC_UNIQUE_KEY;

public class SparkDataProfileMetricBuilder extends BaseSparkConfigurationBuilder {

    @Override
    public void buildSinkConfigs() throws DataVinesException {
        List<SinkConfig> sinkConfigs = new ArrayList<>();

        List<BaseJobParameter> metricJobParameterList = jobExecutionParameter.getMetricParameterList();
        if (CollectionUtils.isNotEmpty(metricJobParameterList)) {
            for (BaseJobParameter parameter : metricJobParameterList) {
                // suffix the actual_value column with the metric's unique key so that
                // several metrics in one job write to distinct columns
                String metricUniqueKey = getMetricUniqueKey(parameter);
                Map<String, String> metricInputParameter = metric2InputParameter.get(metricUniqueKey);
                metricInputParameter.put(METRIC_UNIQUE_KEY, metricUniqueKey);
                String profileSinkSql = SparkSinkSqlBuilder.getProfileValueSql().replace("${actual_value}",
                        "actual_value_" + metricUniqueKey);

                SinkConfig actualValueSinkConfig = getValidateResultDataSinkConfig(
                        null, profileSinkSql, "dv_catalog_entity_profile", metricInputParameter);
                actualValueSinkConfig.setType(SinkType.PROFILE_VALUE.getDescription());
                sinkConfigs.add(actualValueSinkConfig);
            }
        }

        configuration.setSinkParameters(sinkConfigs);
    }
}
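
For context, a simplified sketch of the substitution above; the template is a stand-in for the getProfileValueSql() output, and the key format is a hypothetical example:

String template = "select ${actual_value} as actual_value from ${actual_table}";
String metricUniqueKey = "0_table_column_null";  // hypothetical key
String sql = template.replace("${actual_value}", "actual_value_" + metricUniqueKey);
// -> select actual_value_0_table_column_null as actual_value from ${actual_table}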
SparkSinkSqlBuilder.java
@@ -65,4 +65,20 @@ public static String getMultiTableComparisonSinkSql() {
+ " join ( ${expected_execute_sql} ) tmp2";
}

    public static String getProfileValueSql() {

        List<String> columnList = new ArrayList<>(MetricConstants.PROFILE_COLUMN_LIST.size());

        for (ColumnInfo columnInfo : MetricConstants.PROFILE_COLUMN_LIST) {
            // string-typed parameters must be single-quoted to form valid SQL literals
            if (columnInfo.isNeedSingleQuotation()) {
                columnList.add(StringUtils.wrapperSingleQuotes("${" + columnInfo.getParameterName() + "}") + " as "
                        + columnInfo.getName());
            } else {
                columnList.add("${" + columnInfo.getParameterName() + "}" + " as " + columnInfo.getName());
            }
        }

        return "select " + String.join(", ", columnList) + " from ${actual_table}";
    }
}
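
To make the generated SQL concrete, a self-contained mimic of the loop above with two hypothetical profile columns (the real column list comes from MetricConstants.PROFILE_COLUMN_LIST):

import java.util.ArrayList;
import java.util.List;

public class ProfileSqlSketch {
    public static void main(String[] args) {
        List<String> columnList = new ArrayList<>();
        columnList.add("'${data_date}' as data_date");   // string parameter: single-quoted
        columnList.add("${null_count} as null_count");   // numeric parameter: bare placeholder
        System.out.println("select " + String.join(", ", columnList) + " from ${actual_table}");
        // prints: select '${data_date}' as data_date, ${null_count} as null_count from ${actual_table}
    }
}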
Engine MetricBuilder mapping (properties file)
@@ -1,6 +1,8 @@
spark_single_table=io.datavines.engine.spark.config.SparkSingleTableMetricBuilder
spark_data_profile=io.datavines.engine.spark.config.SparkDataProfileMetricBuilder
spark_multi_table_accuracy=io.datavines.engine.spark.config.SparkMultiTableAccuracyMetricBuilder
spark_multi_table_value_comparison=io.datavines.engine.spark.config.SparkMultiTableValueComparisonMetricBuilder
livy_single_table=io.datavines.engine.spark.config.SparkSingleTableMetricBuilder
livy_data_profile=io.datavines.engine.spark.config.SparkDataProfileMetricBuilder
livy_multi_table_accuracy=io.datavines.engine.spark.config.SparkMultiTableAccuracyMetricBuilder
livy_multi_table_value_comparison=io.datavines.engine.spark.config.SparkMultiTableValueComparisonMetricBuilder
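
The livy_* keys deliberately map to the same classes as the spark_* keys, so a Livy submission reuses the Spark configuration pipeline unchanged. A minimal sketch of resolving a builder from such a mapping, assuming a plain classpath properties file (the resource name here is hypothetical; datavines resolves these through its own plugin loader):

import java.io.InputStream;
import java.util.Properties;

public class MetricBuilderResolver {

    // engine is "spark" or "livy"; jobType is e.g. "data_profile"
    public static Object resolve(String engine, String jobType) throws Exception {
        Properties mapping = new Properties();
        // hypothetical classpath location for the mapping shown above
        try (InputStream in = MetricBuilderResolver.class
                .getResourceAsStream("/metric-builder.properties")) {
            mapping.load(in);
        }
        String className = mapping.getProperty(engine + "_" + jobType);
        return Class.forName(className).getDeclaredConstructor().newInstance();
    }
}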
Service implementation (executeDataProfileJob)
@@ -21,10 +21,12 @@
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import io.datavines.common.datasource.jdbc.entity.ColumnInfo;
import io.datavines.common.entity.SparkEngineParameter;
import io.datavines.common.entity.job.BaseJobParameter;
import io.datavines.common.enums.DataVinesDataType;
import io.datavines.common.enums.EntityRelType;
import io.datavines.common.enums.JobType;
import io.datavines.common.utils.CommonPropertyUtils;
import io.datavines.common.utils.DateUtils;
import io.datavines.common.utils.JSONUtils;
import io.datavines.common.utils.StringUtils;
@@ -926,6 +928,8 @@ public long executeDataProfileJob(RunProfileRequest runProfileRequest, int runningNow)
        createOrUpdate.setTableName(tableName);
        createOrUpdate.setSelectedColumn(String.join(",", columns));
        createOrUpdate.setRunningNow(runningNow);
        engineParameter(createOrUpdate);

        long jobId = jobService.createOrUpdateDataProfileJob(createOrUpdate);

        if (jobId != -1L) {
@@ -954,6 +958,40 @@ public long executeDataProfileJob(RunProfileRequest runProfileRequest, int runningNow)
        return jobId;
    }

    private void engineParameter(DataProfileJobCreateOrUpdate createOrUpdate) {
        String profileEngine = CommonPropertyUtils.getString("profile.execute.engine");
        if ("livy".equalsIgnoreCase(profileEngine)) {
            createOrUpdate.setEngineType("livy");
            String deployMode = CommonPropertyUtils.getString("livy.engine.parameter.deploy.mode", "cluster");
            int numExecutors = CommonPropertyUtils.getInt("livy.engine.parameter.num.executors", 1);
            int driverCores = CommonPropertyUtils.getInt("livy.engine.parameter.driver.cores", 1);
            String driverMemory = CommonPropertyUtils.getString("livy.engine.parameter.driver.memory", "512M");
            int executorCores = CommonPropertyUtils.getInt("livy.engine.parameter.executor.cores", 1);
            String executorMemory = CommonPropertyUtils.getString("livy.engine.parameter.executor.memory", "512M");
            String others = CommonPropertyUtils.getString("livy.engine.parameter.others");

            SparkEngineParameter engineParameter = new SparkEngineParameter("JAVA", deployMode, driverCores, driverMemory,
                    numExecutors, executorCores, executorMemory, others);
            createOrUpdate.setEngineParameter(JSONUtils.toJsonString(engineParameter));
        } else if ("spark".equalsIgnoreCase(profileEngine)) {
            createOrUpdate.setEngineType("spark");
            String deployMode = CommonPropertyUtils.getString("spark.engine.parameter.deploy.mode", "cluster");
            int numExecutors = CommonPropertyUtils.getInt("spark.engine.parameter.num.executors", 1);
            int driverCores = CommonPropertyUtils.getInt("spark.engine.parameter.driver.cores", 1);
            String driverMemory = CommonPropertyUtils.getString("spark.engine.parameter.driver.memory", "512M");
            int executorCores = CommonPropertyUtils.getInt("spark.engine.parameter.executor.cores", 1);
            String executorMemory = CommonPropertyUtils.getString("spark.engine.parameter.executor.memory", "512M");
            String others = CommonPropertyUtils.getString("spark.engine.parameter.others");

            SparkEngineParameter engineParameter = new SparkEngineParameter("JAVA", deployMode, driverCores, driverMemory,
                    numExecutors, executorCores, executorMemory, others);
            createOrUpdate.setEngineParameter(JSONUtils.toJsonString(engineParameter));
        }
    }
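
The two branches above differ only in their property prefix; a sketch of how they could collapse into one path, assuming the same CommonPropertyUtils API (a refactoring suggestion, not part of this PR):

    private void engineParameter(DataProfileJobCreateOrUpdate createOrUpdate) {
        String engine = CommonPropertyUtils.getString("profile.execute.engine");
        if (!"livy".equalsIgnoreCase(engine) && !"spark".equalsIgnoreCase(engine)) {
            return; // "local" and anything else keep the default engine settings
        }
        String prefix = engine.toLowerCase() + ".engine.parameter.";
        createOrUpdate.setEngineType(engine.toLowerCase());
        SparkEngineParameter engineParameter = new SparkEngineParameter("JAVA",
                CommonPropertyUtils.getString(prefix + "deploy.mode", "cluster"),
                CommonPropertyUtils.getInt(prefix + "driver.cores", 1),
                CommonPropertyUtils.getString(prefix + "driver.memory", "512M"),
                CommonPropertyUtils.getInt(prefix + "num.executors", 1),
                CommonPropertyUtils.getInt(prefix + "executor.cores", 1),
                CommonPropertyUtils.getString(prefix + "executor.memory", "512M"),
                CommonPropertyUtils.getString(prefix + "others"));
        createOrUpdate.setEngineParameter(JSONUtils.toJsonString(engineParameter));
    }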

    @Override
    public List<DataTime2ValueItem> listTableRecords(String uuid, String starTime, String endTime) {
        return catalogEntityProfileService.listTableRecords(uuid, starTime, endTime);
scripts/sql/datavines-mysql.sql (7 additions, 0 deletions)
@@ -827,6 +827,13 @@
INSERT INTO `dv_config` VALUES ('22', '-1', 'livy.task.jars', CONCAT('datavines-
'datavines-engine-api-1.0.0-SNAPSHOT.jar,mysql-connector-java-8.0.16.jar,httpclient-4.4.1.jar,'
'httpcore-4.4.1.jar,postgresql-42.2.6.jar,presto-jdbc-0.283.jar,trino-jdbc-407.jar,clickhouse-jdbc-0.1.53.jar'),
'1', '1', '2023-09-05 21:02:38', '1', '2023-09-05 21:02:38');
INSERT INTO `dv_config` VALUES ('23', '-1', 'profile.execute.engine', 'local', '1', '1', '2023-09-05 21:02:38', '1', '2023-09-05 21:02:38');
INSERT INTO `dv_config` VALUES ('24', '-1', 'spark.engine.parameter.deploy.mode', 'cluster', '1', '1', '2023-09-05 21:02:38', '1', '2023-09-05 21:02:38');
INSERT INTO `dv_config` VALUES ('25', '-1', 'spark.engine.parameter.num.executors', '1', '1', '1', '2023-09-05 21:02:38', '1', '2023-09-05 21:02:38');
INSERT INTO `dv_config` VALUES ('26', '-1', 'spark.engine.parameter.driver.cores', '1', '1', '1', '2023-09-05 21:02:38', '1', '2023-09-05 21:02:38');
INSERT INTO `dv_config` VALUES ('27', '-1', 'spark.engine.parameter.driver.memory', '512M', '1', '1', '2023-09-05 21:02:38', '1', '2023-09-05 21:02:38');
INSERT INTO `dv_config` VALUES ('28', '-1', 'spark.engine.parameter.executor.cores', '1', '1', '1', '2023-09-05 21:02:38', '1', '2023-09-05 21:02:38');
INSERT INTO `dv_config` VALUES ('29', '-1', 'spark.engine.parameter.executor.memory', '512M', '1', '1', '2023-09-05 21:02:38', '1', '2023-09-05 21:02:38');

INSERT INTO `dv_user` (`id`, `username`, `password`, `email`, `phone`, `admin`) VALUES ('1', 'admin', '$2a$10$9ZcicUYFl/.knBi9SE53U.Nml8bfNeArxr35HQshxXzimbA6Ipgqq', '[email protected]', NULL, '0');
INSERT INTO `dv_workspace` (`id`, `name`, `create_by`, `update_by`) VALUES ('1', "admin\'s default", '1', '1');