From bc09f8fcec839150dabe2f0b04101c8ac215cb35 Mon Sep 17 00:00:00 2001 From: aiwenmo <32723967+aiwenmo@users.noreply.github.com> Date: Fri, 3 Nov 2023 12:28:37 +0800 Subject: [PATCH 01/21] [Feature-1842][client] Analyze lineage from the original relNode, not the optimized relNode, and add unit tests (#2488) * [Feature-1842][client] Analyze lineage from the original relNode, not the optimized relNode, and add unit tests * spotless --------- Co-authored-by: wenmo <32723967+wenmo@users.noreply.github.com> --- dinky-client/dinky-client-1.14/pom.xml | 5 + .../calcite/rel/metadata/RelColumnOrigin.java | 116 ++++++ .../rel/metadata/RelMdColumnOrigins.java | 352 ++++++++++++------ .../executor/CustomTableEnvironmentImpl.java | 9 +- .../FlinkStreamProgramWithoutPhysical.java | 215 ----------- .../java/org/dinky/utils/LineageContext.java | 83 +---- .../org/dinky/utils/LineageContextTest.java | 112 ++++++ dinky-client/dinky-client-1.15/pom.xml | 5 + .../calcite/rel/metadata/RelColumnOrigin.java | 116 ++++++ .../rel/metadata/RelMdColumnOrigins.java | 352 ++++++++++++------ .../executor/CustomTableEnvironmentImpl.java | 9 +- .../FlinkStreamProgramWithoutPhysical.java | 246 ------------ .../java/org/dinky/utils/LineageContext.java | 94 +---- .../org/dinky/utils/LineageContextTest.java | 112 ++++++ dinky-client/dinky-client-1.16/pom.xml | 5 + .../calcite/rel/metadata/RelColumnOrigin.java | 116 ++++++ .../rel/metadata/RelMdColumnOrigins.java | 352 ++++++++++++------ .../executor/CustomTableEnvironmentImpl.java | 8 +- .../FlinkStreamProgramWithoutPhysical.java | 246 ------------ .../java/org/dinky/utils/LineageContext.java | 89 +---- .../org/dinky/utils/LineageContextTest.java | 112 ++++++ dinky-client/dinky-client-1.17/pom.xml | 5 + .../calcite/rel/metadata/RelColumnOrigin.java | 116 ++++++ .../rel/metadata/RelMdColumnOrigins.java | 352 ++++++++++++------ .../executor/CustomTableEnvironmentImpl.java | 8 +- .../FlinkStreamProgramWithoutPhysical.java | 246 ------------ .../java/org/dinky/utils/LineageContext.java | 89 +---- .../org/dinky/utils/LineageContextTest.java | 112 ++++++ dinky-client/dinky-client-1.18/pom.xml | 5 + .../calcite/rel/metadata/RelColumnOrigin.java | 116 ++++++ .../rel/metadata/RelMdColumnOrigins.java | 352 ++++++++++++------ .../executor/CustomTableEnvironmentImpl.java | 8 +- .../java/org/dinky/utils/LineageContext.java | 89 +---- .../org/dinky/utils/LineageContextTest.java | 112 ++++++ .../java/org/dinky/data/model/LineageRel.java | 63 +++- pom.xml | 4 +- 36 files changed, 2420 insertions(+), 2011 deletions(-) create mode 100644 dinky-client/dinky-client-1.14/src/main/java/org/apache/calcite/rel/metadata/RelColumnOrigin.java delete mode 100644 dinky-client/dinky-client-1.14/src/main/java/org/dinky/utils/FlinkStreamProgramWithoutPhysical.java create mode 100644 dinky-client/dinky-client-1.14/src/test/java/org/dinky/utils/LineageContextTest.java create mode 100644 dinky-client/dinky-client-1.15/src/main/java/org/apache/calcite/rel/metadata/RelColumnOrigin.java delete mode 100644 dinky-client/dinky-client-1.15/src/main/java/org/dinky/utils/FlinkStreamProgramWithoutPhysical.java create mode 100644 dinky-client/dinky-client-1.15/src/test/java/org/dinky/utils/LineageContextTest.java create mode 100644 dinky-client/dinky-client-1.16/src/main/java/org/apache/calcite/rel/metadata/RelColumnOrigin.java delete mode 100644 dinky-client/dinky-client-1.16/src/main/java/org/dinky/utils/FlinkStreamProgramWithoutPhysical.java create mode 100644 
dinky-client/dinky-client-1.16/src/test/java/org/dinky/utils/LineageContextTest.java create mode 100644 dinky-client/dinky-client-1.17/src/main/java/org/apache/calcite/rel/metadata/RelColumnOrigin.java delete mode 100644 dinky-client/dinky-client-1.17/src/main/java/org/dinky/utils/FlinkStreamProgramWithoutPhysical.java create mode 100644 dinky-client/dinky-client-1.17/src/test/java/org/dinky/utils/LineageContextTest.java create mode 100644 dinky-client/dinky-client-1.18/src/main/java/org/apache/calcite/rel/metadata/RelColumnOrigin.java create mode 100644 dinky-client/dinky-client-1.18/src/test/java/org/dinky/utils/LineageContextTest.java diff --git a/dinky-client/dinky-client-1.14/pom.xml b/dinky-client/dinky-client-1.14/pom.xml index 6472f461a4..4a6e807f45 100644 --- a/dinky-client/dinky-client-1.14/pom.xml +++ b/dinky-client/dinky-client-1.14/pom.xml @@ -65,6 +65,11 @@ javax.activation activation + + junit + junit + test + diff --git a/dinky-client/dinky-client-1.14/src/main/java/org/apache/calcite/rel/metadata/RelColumnOrigin.java b/dinky-client/dinky-client-1.14/src/main/java/org/apache/calcite/rel/metadata/RelColumnOrigin.java new file mode 100644 index 0000000000..5fc8dc24cb --- /dev/null +++ b/dinky-client/dinky-client-1.14/src/main/java/org/apache/calcite/rel/metadata/RelColumnOrigin.java @@ -0,0 +1,116 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.apache.calcite.rel.metadata; + +import org.apache.calcite.plan.RelOptTable; + +/** + * Modified based on calcite's source code org.apache.calcite.rel.metadata.RelColumnOrigin + *

+ * Modification point:
+ *   1. add transform field and related code.
+ * + * @description: RelColumnOrigin is a data structure describing one of the origins of an + * output column produced by a relational expression. + * @author: HamaWhite + */ +public class RelColumnOrigin { + // ~ Instance fields -------------------------------------------------------- + + private final RelOptTable originTable; + + private final int iOriginColumn; + + private final boolean isDerived; + + /** + * Stores the expression for data conversion, + * which source table fields are transformed by which expression the target field + */ + private String transform; + + // ~ Constructors ----------------------------------------------------------- + + public RelColumnOrigin(RelOptTable originTable, int iOriginColumn, boolean isDerived) { + this.originTable = originTable; + this.iOriginColumn = iOriginColumn; + this.isDerived = isDerived; + } + + public RelColumnOrigin(RelOptTable originTable, int iOriginColumn, boolean isDerived, String transform) { + this.originTable = originTable; + this.iOriginColumn = iOriginColumn; + this.isDerived = isDerived; + this.transform = transform; + } + + // ~ Methods ---------------------------------------------------------------- + + /** + * Returns table of origin. + */ + public RelOptTable getOriginTable() { + return originTable; + } + + /** + * Returns the 0-based index of column in origin table; whether this ordinal + * is flattened or unflattened depends on whether UDT flattening has already + * been performed on the relational expression which produced this + * description. + */ + public int getOriginColumnOrdinal() { + return iOriginColumn; + } + + /** + * Consider the query select a+b as c, d as e from t. The + * output column c has two origins (a and b), both of them derived. The + * output column d as one origin (c), which is not derived. + * + * @return false if value taken directly from column in origin table; true + * otherwise + */ + public boolean isDerived() { + return isDerived; + } + + public String getTransform() { + return transform; + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof RelColumnOrigin)) { + return false; + } + RelColumnOrigin other = (RelColumnOrigin) obj; + return originTable.getQualifiedName().equals(other.originTable.getQualifiedName()) + && (iOriginColumn == other.iOriginColumn) + && (isDerived == other.isDerived); + } + + @Override + public int hashCode() { + return originTable.getQualifiedName().hashCode() + iOriginColumn + (isDerived ? 
313 : 0); + } +} diff --git a/dinky-client/dinky-client-1.14/src/main/java/org/apache/calcite/rel/metadata/RelMdColumnOrigins.java b/dinky-client/dinky-client-1.14/src/main/java/org/apache/calcite/rel/metadata/RelMdColumnOrigins.java index b8b0a7aed2..5c8aae002a 100644 --- a/dinky-client/dinky-client-1.14/src/main/java/org/apache/calcite/rel/metadata/RelMdColumnOrigins.java +++ b/dinky-client/dinky-client-1.14/src/main/java/org/apache/calcite/rel/metadata/RelMdColumnOrigins.java @@ -36,7 +36,7 @@ import org.apache.calcite.rel.core.Sort; import org.apache.calcite.rel.core.TableFunctionScan; import org.apache.calcite.rel.core.TableModify; -import org.apache.calcite.rel.core.Window; +import org.apache.calcite.rel.core.TableScan; import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.rex.RexCall; import org.apache.calcite.rex.RexFieldAccess; @@ -48,33 +48,47 @@ import org.apache.calcite.rex.RexVisitor; import org.apache.calcite.rex.RexVisitorImpl; import org.apache.calcite.util.BuiltInMethod; -import org.apache.flink.table.planner.plan.schema.TableSourceTable; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; +import java.util.Map; import java.util.Set; +import java.util.function.Function; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Modified based on calcite's source code org.apache.calcite.rel.metadata.RelMdColumnOrigins * - *

Modification point: 1. Support lookup join, add method getColumnOrigins(Snapshot - * rel,RelMetadataQuery mq, int iOutputColumn) 2. Support watermark, add method - * getColumnOrigins(SingleRel rel,RelMetadataQuery mq, int iOutputColumn) 3. Support table function, - * add method getColumnOrigins(Correlate rel, RelMetadataQuery mq, int iOutputColumn) 4. Support - * field AS LOCALTIMESTAMP, modify method getColumnOrigins(Calc rel, RelMetadataQuery mq, int - * iOutputColumn) 5. Support CEP, add method getColumnOrigins(Match rel, RelMetadataQuery mq, int - * iOutputColumn) 6. Support ROW_NUMBER(), add method getColumnOrigins(Window rel, RelMetadataQuery - * mq, int iOutputColumn)* + *

Modification point:
  1. Support lookup join, add method getColumnOrigins(Snapshot rel,RelMetadataQuery mq, int iOutputColumn) + *
  2. Support watermark, add method getColumnOrigins(SingleRel rel,RelMetadataQuery mq, int iOutputColumn) + *
  3. Support table function, add method getColumnOrigins(Correlate rel, RelMetadataQuery mq, int iOutputColumn) + *
  4. Support CEP, add method getColumnOrigins(Match rel, RelMetadataQuery mq, int iOutputColumn) + *
  5. Support transform, add method createDerivedColumnOrigins(Set inputSet, String transform, boolean originTransform), and related code + *
  6. Support field AS LOCALTIMESTAMP, modify method getColumnOrigins(Project rel, RelMetadataQuery mq, int iOutputColumn) + *
  7. Support PROCTIME() as the first field, add method computeIndexWithOffset, used by getColumnOrigins(Project rel, RelMetadataQuery mq, int iOutputColumn) + *
      * - * @description: RelMdColumnOrigins supplies a default implementation of {@link - * RelMetadataQuery#getColumnOrigins} for the standard logical algebra. - * @version: 1.0.0 + * @description: RelMdColumnOrigins supplies a default implementation of {@link RelMetadataQuery#getColumnOrigins} for the standard logical algebra. + * @author: HamaWhite */ public class RelMdColumnOrigins implements MetadataHandler { + private static final Logger LOG = LoggerFactory.getLogger(RelMdColumnOrigins.class); + + public static final String DELIMITER = "."; + public static final RelMetadataProvider SOURCE = ReflectiveRelMetadataProvider.reflectiveSource( BuiltInMethod.COLUMN_ORIGIN.method, new RelMdColumnOrigins()); @@ -98,10 +112,10 @@ public Set getColumnOrigins(Aggregate rel, RelMetadataQuery mq, // Aggregate columns are derived from input columns AggregateCall call = rel.getAggCallList().get(iOutputColumn - rel.getGroupCount()); - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); for (Integer iInput : call.getArgList()) { Set inputSet = mq.getColumnOrigins(rel.getInput(), iInput); - inputSet = createDerivedColumnOrigins(inputSet); + inputSet = createDerivedColumnOrigins(inputSet, call.toString(), true); if (inputSet != null) { set.addAll(inputSet); } @@ -132,7 +146,9 @@ public Set getColumnOrigins(Join rel, RelMetadataQuery mq, int return set; } - /** Support the field blood relationship of table function */ + /** + * Support the field blood relationship of table function + */ public Set getColumnOrigins(Correlate rel, RelMetadataQuery mq, int iOutputColumn) { List leftFieldList = rel.getLeft().getRowType().getFieldList(); @@ -142,68 +158,126 @@ public Set getColumnOrigins(Correlate rel, RelMetadataQuery mq, if (iOutputColumn < nLeftColumns) { set = mq.getColumnOrigins(rel.getLeft(), iOutputColumn); } else { - // get the field name of the left table configured in the Table Function on the right - TableFunctionScan tableFunctionScan = (TableFunctionScan) rel.getRight(); - RexCall rexCall = (RexCall) tableFunctionScan.getCall(); - // support only one field in table function - RexFieldAccess rexFieldAccess = (RexFieldAccess) rexCall.operands.get(0); - String fieldName = rexFieldAccess.getField().getName(); - - int leftFieldIndex = 0; - for (int i = 0; i < nLeftColumns; i++) { - if (leftFieldList.get(i).getName().equalsIgnoreCase(fieldName)) { - leftFieldIndex = i; - break; + if (rel.getRight() instanceof TableFunctionScan) { + // get the field name of the left table configured in the Table Function on the right + TableFunctionScan tableFunctionScan = (TableFunctionScan) rel.getRight(); + RexCall rexCall = (RexCall) tableFunctionScan.getCall(); + // support only one field in table function + RexFieldAccess rexFieldAccess = + (RexFieldAccess) rexCall.getOperands().get(0); + String fieldName = rexFieldAccess.getField().getName(); + + int leftFieldIndex = 0; + for (int i = 0; i < nLeftColumns; i++) { + if (leftFieldList.get(i).getName().equalsIgnoreCase(fieldName)) { + leftFieldIndex = i; + break; + } } + /** + * Get the fields from the left table, don't go to + * getColumnOrigins(TableFunctionScan rel,RelMetadataQuery mq, int iOutputColumn), + * otherwise the return is null, and the UDTF field origin cannot be parsed + */ + set = mq.getColumnOrigins(rel.getLeft(), leftFieldIndex); + + // process transform for udtf + String transform = rexCall.toString().replace(rexFieldAccess.toString(), fieldName) + + DELIMITER + + 
tableFunctionScan.getRowType().getFieldNames().get(iOutputColumn - nLeftColumns); + set = createDerivedColumnOrigins(set, transform, false); + } else { + set = mq.getColumnOrigins(rel.getRight(), iOutputColumn - nLeftColumns); } - /** - * Get the fields from the left table, don't go to getColumnOrigins(TableFunctionScan - * rel,RelMetadataQuery mq, int iOutputColumn), otherwise the return is null, and the - * UDTF field origin cannot be parsed - */ - set = mq.getColumnOrigins(rel.getLeft(), leftFieldIndex); } return set; } public Set getColumnOrigins(SetOp rel, RelMetadataQuery mq, int iOutputColumn) { - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); for (RelNode input : rel.getInputs()) { Set inputSet = mq.getColumnOrigins(input, iOutputColumn); if (inputSet == null) { - return null; + return Collections.emptySet(); } set.addAll(inputSet); } return set; } - /** Support the field blood relationship of lookup join */ + /** + * Support the field blood relationship of lookup join + */ public Set getColumnOrigins(Snapshot rel, RelMetadataQuery mq, int iOutputColumn) { return mq.getColumnOrigins(rel.getInput(), iOutputColumn); } - /** Support the field blood relationship of watermark */ + /** + * Support the field blood relationship of watermark + */ public Set getColumnOrigins(SingleRel rel, RelMetadataQuery mq, int iOutputColumn) { return mq.getColumnOrigins(rel.getInput(), iOutputColumn); } /** - * Support field blood relationship of CEP. The first column is the field after PARTITION BY, - * and the other columns come from the measures in Match + * Support for new fields in the source table similar to those created with the LOCALTIMESTAMP function + */ + public Set getColumnOrigins(Project rel, final RelMetadataQuery mq, int iOutputColumn) { + final RelNode input = rel.getInput(); + RexNode rexNode = rel.getProjects().get(iOutputColumn); + + if (rexNode instanceof RexInputRef) { + // Direct reference: no derivation added. + RexInputRef inputRef = (RexInputRef) rexNode; + int index = inputRef.getIndex(); + if (input instanceof TableScan) { + index = computeIndexWithOffset(rel.getProjects(), inputRef.getIndex(), iOutputColumn); + } + return mq.getColumnOrigins(input, index); + } else if (input instanceof TableScan + && rexNode.getClass().equals(RexCall.class) + && ((RexCall) rexNode).getOperands().isEmpty()) { + return mq.getColumnOrigins(input, iOutputColumn); + } + // Anything else is a derivation, possibly from multiple columns. + final Set set = getMultipleColumns(rexNode, input, mq); + return createDerivedColumnOrigins(set, rexNode.toString(), true); + } + + private int computeIndexWithOffset(List projects, int baseIndex, int iOutputColumn) { + int offset = 0; + for (int index = 0; index < iOutputColumn; index++) { + RexNode rexNode = projects.get(index); + if ((rexNode.getClass().equals(RexCall.class) + && ((RexCall) rexNode).getOperands().isEmpty())) { + offset += 1; + } + } + return baseIndex + offset; + } + + /** + * Support field blood relationship of CEP. 
+ * The first column is the field after PARTITION BY, and the other columns come from the measures in Match */ public Set getColumnOrigins(Match rel, RelMetadataQuery mq, int iOutputColumn) { - if (iOutputColumn == 0) { + int orderCount = rel.getOrderKeys().getKeys().size(); + + if (iOutputColumn < orderCount) { return mq.getColumnOrigins(rel.getInput(), iOutputColumn); } final RelNode input = rel.getInput(); - RexNode rexNode = rel.getMeasures().values().asList().get(iOutputColumn - 1); + RexNode rexNode = rel.getMeasures().values().asList().get(iOutputColumn - orderCount); RexPatternFieldRef rexPatternFieldRef = searchRexPatternFieldRef(rexNode); if (rexPatternFieldRef != null) { - return mq.getColumnOrigins(input, rexPatternFieldRef.getIndex()); + final Set set = mq.getColumnOrigins(input, rexPatternFieldRef.getIndex()); + String originTransform = rexNode instanceof RexCall + ? ((RexCall) rexNode).getOperands().get(0).toString() + : null; + return createDerivedColumnOrigins(set, originTransform, true); } - return null; + return Collections.emptySet(); } private RexPatternFieldRef searchRexPatternFieldRef(RexNode rexNode) { @@ -219,46 +293,6 @@ private RexPatternFieldRef searchRexPatternFieldRef(RexNode rexNode) { return null; } - /** Support the field blood relationship of ROW_NUMBER() */ - public Set getColumnOrigins(Window rel, RelMetadataQuery mq, int iOutputColumn) { - final RelNode input = rel.getInput(); - /** - * Haven't found a good way to judge whether the field comes from window, for the time - * being, first judge by parsing the string - */ - String fieldName = rel.getRowType().getFieldNames().get(iOutputColumn); - // for example: "w1$o0" - if (fieldName.startsWith("w") && fieldName.contains("$")) { - int groupIndex = Integer.parseInt(fieldName.substring(1, fieldName.indexOf("$"))); - final Set set = new LinkedHashSet<>(); - if (!rel.groups.isEmpty()) { - Window.Group group = rel.groups.get(groupIndex); - // process partition by keys - group.keys.asList().forEach(index -> set.addAll(mq.getColumnOrigins(input, index))); - // process order by keys - group.orderKeys - .getFieldCollations() - .forEach(e -> set.addAll(mq.getColumnOrigins(input, e.getFieldIndex()))); - } - return set; - } - return mq.getColumnOrigins(rel.getInput(), iOutputColumn); - } - - public Set getColumnOrigins(Project rel, final RelMetadataQuery mq, int iOutputColumn) { - final RelNode input = rel.getInput(); - RexNode rexNode = rel.getProjects().get(iOutputColumn); - - if (rexNode instanceof RexInputRef) { - // Direct reference: no derivation added. - RexInputRef inputRef = (RexInputRef) rexNode; - return mq.getColumnOrigins(input, inputRef.getIndex()); - } - // Anything else is a derivation, possibly from multiple columns. - final Set set = getMultipleColumns(rexNode, input, mq); - return createDerivedColumnOrigins(set); - } - public Set getColumnOrigins(Calc rel, final RelMetadataQuery mq, int iOutputColumn) { final RelNode input = rel.getInput(); final RexShuttle rexShuttle = new RexShuttle() { @@ -277,30 +311,6 @@ public RexNode visitLocalRef(RexLocalRef localRef) { // Direct reference: no derivation added. 
RexInputRef inputRef = (RexInputRef) rexNode; return mq.getColumnOrigins(input, inputRef.getIndex()); - } else if (rexNode instanceof RexCall && ((RexCall) rexNode).operands.isEmpty()) { - // support for new fields in the source table similar to those created with the - // LOCALTIMESTAMP function - TableSourceTable table = ((TableSourceTable) rel.getInput().getTable()); - if (table != null) { - String targetFieldName = rel.getProgram() - .getOutputRowType() - .getFieldList() - .get(iOutputColumn) - .getName(); - List fieldList = - table.catalogTable().getResolvedSchema().getColumnNames(); - - int index = -1; - for (int i = 0; i < fieldList.size(); i++) { - if (fieldList.get(i).equalsIgnoreCase(targetFieldName)) { - index = i; - break; - } - } - if (index != -1) { - return Collections.singleton(new RelColumnOrigin(table, index, false)); - } - } } // Anything else is a derivation, possibly from multiple columns. final Set set = getMultipleColumns(rexNode, input, mq); @@ -324,14 +334,14 @@ public Set getColumnOrigins(Exchange rel, RelMetadataQuery mq, } public Set getColumnOrigins(TableFunctionScan rel, RelMetadataQuery mq, int iOutputColumn) { - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); Set mappings = rel.getColumnMappings(); if (mappings == null) { - if (rel.getInputs().size() > 0) { + if (!rel.getInputs().isEmpty()) { // This is a non-leaf transformation: say we don't // know about origins, because there are probably // columns below. - return null; + return Collections.emptySet(); } else { // This is a leaf transformation: say there are fer sure no // column origins. @@ -346,7 +356,7 @@ public Set getColumnOrigins(TableFunctionScan rel, RelMetadataQ final int column = mapping.iInputColumn; Set origins = mq.getColumnOrigins(input, column); if (origins == null) { - return null; + return Collections.emptySet(); } if (mapping.derived) { origins = createDerivedColumnOrigins(origins); @@ -357,18 +367,19 @@ public Set getColumnOrigins(TableFunctionScan rel, RelMetadataQ } // Catch-all rule when none of the others apply. + @SuppressWarnings("squid:S1172") public Set getColumnOrigins(RelNode rel, RelMetadataQuery mq, int iOutputColumn) { // NOTE jvs 28-Mar-2006: We may get this wrong for a physical table // expression which supports projections. In that case, // it's up to the plugin writer to override with the // correct information. - if (rel.getInputs().size() > 0) { + if (!rel.getInputs().isEmpty()) { // No generic logic available for non-leaf rels. - return null; + return Collections.emptySet(); } - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); RelOptTable table = rel.getTable(); if (table == null) { @@ -383,7 +394,7 @@ public Set getColumnOrigins(RelNode rel, RelMetadataQuery mq, i // names.) This detection assumes the table expression doesn't handle // rename as well. 
if (table.getRowType() != rel.getRowType()) { - return null; + return Collections.emptySet(); } set.add(new RelColumnOrigin(table, iOutputColumn, false)); @@ -392,9 +403,9 @@ public Set getColumnOrigins(RelNode rel, RelMetadataQuery mq, i private Set createDerivedColumnOrigins(Set inputSet) { if (inputSet == null) { - return null; + return Collections.emptySet(); } - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); for (RelColumnOrigin rco : inputSet) { RelColumnOrigin derived = new RelColumnOrigin(rco.getOriginTable(), rco.getOriginColumnOrdinal(), true); set.add(derived); @@ -402,10 +413,113 @@ private Set createDerivedColumnOrigins(Set inp return set; } + private Set createDerivedColumnOrigins( + Set inputSet, String transform, boolean originTransform) { + if (inputSet == null || inputSet.isEmpty()) { + return Collections.emptySet(); + } + final Set set = new LinkedHashSet<>(); + + String finalTransform = originTransform ? computeTransform(inputSet, transform) : transform; + for (RelColumnOrigin rco : inputSet) { + RelColumnOrigin derived = + new RelColumnOrigin(rco.getOriginTable(), rco.getOriginColumnOrdinal(), true, finalTransform); + set.add(derived); + } + return set; + } + + /** + * Replace the variable at the beginning of $ in input with the real field information + */ + private String computeTransform(Set inputSet, String transform) { + LOG.debug("origin transform: {}", transform); + Pattern pattern = Pattern.compile("\\$\\d+"); + Matcher matcher = pattern.matcher(transform); + + Set operandSet = new LinkedHashSet<>(); + while (matcher.find()) { + operandSet.add(matcher.group()); + } + + if (operandSet.isEmpty()) { + LOG.info("operandSet is empty"); + return null; + } + if (inputSet.size() != operandSet.size()) { + LOG.warn( + "The number [{}] of fields in the source tables are not equal to operands [{}]", + inputSet.size(), + operandSet.size()); + return null; + } + + Map sourceColumnMap = new HashMap<>(); + Iterator iterator = optimizeSourceColumnSet(inputSet).iterator(); + operandSet.forEach(e -> sourceColumnMap.put(e, iterator.next())); + LOG.debug("sourceColumnMap: {}", sourceColumnMap); + + matcher = pattern.matcher(transform); + String temp; + while (matcher.find()) { + temp = matcher.group(); + transform = transform.replace(temp, sourceColumnMap.get(temp)); + } + + // temporary special treatment + transform = transform.replace("_UTF-16LE", ""); + LOG.debug("transform: {}", transform); + return transform; + } + + /** + * Increase the readability of transform. + * if catalog, database and table are the same, return field. + * If the catalog and database are the same, return the table and field. + * If the catalog is the same, return the database, table, field. + * Otherwise, return all + */ + private Set optimizeSourceColumnSet(Set inputSet) { + Set catalogSet = new HashSet<>(); + Set databaseSet = new HashSet<>(); + Set tableSet = new HashSet<>(); + Set> qualifiedSet = new LinkedHashSet<>(); + for (RelColumnOrigin rco : inputSet) { + RelOptTable originTable = rco.getOriginTable(); + List qualifiedName = originTable.getQualifiedName(); + + // catalog,database,table,field + List qualifiedList = new ArrayList<>(qualifiedName); + catalogSet.add(qualifiedName.get(0)); + databaseSet.add(qualifiedName.get(1)); + tableSet.add(qualifiedName.get(2)); + + String field = rco.getTransform() != null + ? 
rco.getTransform() + : originTable.getRowType().getFieldNames().get(rco.getOriginColumnOrdinal()); + qualifiedList.add(field); + qualifiedSet.add(qualifiedList); + } + if (catalogSet.size() == 1 && databaseSet.size() == 1 && tableSet.size() == 1) { + return optimizeName(qualifiedSet, e -> e.get(3)); + } else if (catalogSet.size() == 1 && databaseSet.size() == 1) { + return optimizeName(qualifiedSet, e -> String.join(DELIMITER, e.subList(2, 4))); + } else if (catalogSet.size() == 1) { + return optimizeName(qualifiedSet, e -> String.join(DELIMITER, e.subList(1, 4))); + } else { + return optimizeName(qualifiedSet, e -> String.join(DELIMITER, e)); + } + } + + private Set optimizeName(Set> qualifiedSet, Function, String> mapper) { + return qualifiedSet.stream().map(mapper).collect(Collectors.toSet()); + } + private Set getMultipleColumns(RexNode rexNode, RelNode input, final RelMetadataQuery mq) { - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); final RexVisitor visitor = new RexVisitorImpl(true) { + @Override public Void visitInputRef(RexInputRef inputRef) { Set inputSet = mq.getColumnOrigins(input, inputRef.getIndex()); if (inputSet != null) { diff --git a/dinky-client/dinky-client-1.14/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java b/dinky-client/dinky-client-1.14/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java index be1682c8a4..022897a9ab 100644 --- a/dinky-client/dinky-client-1.14/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java +++ b/dinky-client/dinky-client-1.14/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java @@ -22,7 +22,6 @@ import org.dinky.assertion.Asserts; import org.dinky.data.model.LineageRel; import org.dinky.data.result.SqlExplainResult; -import org.dinky.utils.FlinkStreamProgramWithoutPhysical; import org.dinky.utils.LineageContext; import org.apache.flink.api.common.RuntimeExecutionMode; @@ -68,7 +67,6 @@ import org.apache.flink.table.operations.ddl.CreateTableASOperation; import org.apache.flink.table.operations.ddl.CreateTableOperation; import org.apache.flink.table.planner.delegation.DefaultExecutor; -import org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram; import org.apache.flink.table.typeutils.FieldInfoUtils; import org.apache.flink.types.Row; @@ -94,8 +92,6 @@ */ public class CustomTableEnvironmentImpl extends AbstractCustomTableEnvironment { - private final FlinkChainedProgram flinkChainedProgram; - public CustomTableEnvironmentImpl( CatalogManager catalogManager, ModuleManager moduleManager, @@ -117,8 +113,6 @@ public CustomTableEnvironmentImpl( isStreamingMode, userClassLoader)); this.executor = executor; - this.flinkChainedProgram = - FlinkStreamProgramWithoutPhysical.buildProgram((Configuration) executionEnvironment.getConfiguration()); } public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment) { @@ -365,8 +359,7 @@ public void createTemporaryView(String path, DataStream dataStream, Strin @Override public List getLineage(String statement) { - LineageContext lineageContext = - new LineageContext(flinkChainedProgram, (TableEnvironmentImpl) streamTableEnvironment); + LineageContext lineageContext = new LineageContext((TableEnvironmentImpl) streamTableEnvironment); return lineageContext.getLineage(statement); } diff --git a/dinky-client/dinky-client-1.14/src/main/java/org/dinky/utils/FlinkStreamProgramWithoutPhysical.java 
b/dinky-client/dinky-client-1.14/src/main/java/org/dinky/utils/FlinkStreamProgramWithoutPhysical.java deleted file mode 100644 index 60755f0700..0000000000 --- a/dinky-client/dinky-client-1.14/src/main/java/org/dinky/utils/FlinkStreamProgramWithoutPhysical.java +++ /dev/null @@ -1,215 +0,0 @@ -/* - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package org.dinky.utils; - -import org.apache.calcite.plan.Convention; -import org.apache.calcite.plan.hep.HepMatchOrder; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.table.api.config.OptimizerConfigOptions; -import org.apache.flink.table.planner.plan.nodes.FlinkConventions; -import org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram; -import org.apache.flink.table.planner.plan.optimize.program.FlinkDecorrelateProgram; -import org.apache.flink.table.planner.plan.optimize.program.FlinkGroupProgramBuilder; -import org.apache.flink.table.planner.plan.optimize.program.FlinkHepRuleSetProgramBuilder; -import org.apache.flink.table.planner.plan.optimize.program.FlinkVolcanoProgramBuilder; -import org.apache.flink.table.planner.plan.optimize.program.HEP_RULES_EXECUTION_TYPE; -import org.apache.flink.table.planner.plan.rules.FlinkStreamRuleSets; - -/** - * FlinkStreamProgramWithoutPhysical - * - * @since 2022/8/20 23:33 - */ -public class FlinkStreamProgramWithoutPhysical { - - private static final String SUBQUERY_REWRITE = "subquery_rewrite"; - private static final String TEMPORAL_JOIN_REWRITE = "temporal_join_rewrite"; - private static final String DECORRELATE = "decorrelate"; - private static final String DEFAULT_REWRITE = "default_rewrite"; - private static final String PREDICATE_PUSHDOWN = "predicate_pushdown"; - private static final String JOIN_REORDER = "join_reorder"; - private static final String PROJECT_REWRITE = "project_rewrite"; - private static final String LOGICAL = "logical"; - private static final String LOGICAL_REWRITE = "logical_rewrite"; - - public static FlinkChainedProgram buildProgram(Configuration config) { - FlinkChainedProgram chainedProgram = new FlinkChainedProgram(); - - // rewrite sub-queries to joins - chainedProgram.addLast( - SUBQUERY_REWRITE, - FlinkGroupProgramBuilder.newBuilder() - // rewrite QueryOperationCatalogViewTable before rewriting sub-queries - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.TABLE_REF_RULES()) - .build(), - "convert table references before rewriting sub-queries to" + " semi-join") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - 
.setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.SEMI_JOIN_RULES()) - .build(), - "rewrite sub-queries to semi-join") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_COLLECTION()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.TABLE_SUBQUERY_RULES()) - .build(), - "sub-queries remove") - // convert RelOptTableImpl (which exists in SubQuery before) to - // FlinkRelOptTable - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.TABLE_REF_RULES()) - .build(), - "convert table references after sub-queries removed") - .build()); - - // rewrite special temporal join plan - chainedProgram.addLast( - TEMPORAL_JOIN_REWRITE, - FlinkGroupProgramBuilder.newBuilder() - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.EXPAND_PLAN_RULES()) - .build(), - "convert correlate to temporal table join") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.POST_EXPAND_CLEAN_UP_RULES()) - .build(), - "convert enumerable table scan") - .build()); - - // query decorrelation - chainedProgram.addLast( - DECORRELATE, - FlinkGroupProgramBuilder.newBuilder() - // rewrite before decorrelation - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.PRE_DECORRELATION_RULES()) - .build(), - "pre-rewrite before decorrelation") - .addProgram(new FlinkDecorrelateProgram(), "") - .build()); - - // default rewrite, includes: predicate simplification, expression reduction, window - // properties rewrite, etc. 
- chainedProgram.addLast( - DEFAULT_REWRITE, - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.DEFAULT_REWRITE_RULES()) - .build()); - - // rule based optimization: push down predicate(s) in where clause, so it only needs to read - // the required data - chainedProgram.addLast( - PREDICATE_PUSHDOWN, - FlinkGroupProgramBuilder.newBuilder() - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_COLLECTION()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.FILTER_PREPARE_RULES()) - .build(), - "filter rules") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.FILTER_TABLESCAN_PUSHDOWN_RULES()) - .build(), - "push predicate into table scan") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.PRUNE_EMPTY_RULES()) - .build(), - "prune empty after predicate push down") - .build()); - - // join reorder - if (config.getBoolean(OptimizerConfigOptions.TABLE_OPTIMIZER_JOIN_REORDER_ENABLED)) { - chainedProgram.addLast( - JOIN_REORDER, - FlinkGroupProgramBuilder.newBuilder() - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_COLLECTION()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.JOIN_REORDER_PREPARE_RULES()) - .build(), - "merge join into MultiJoin") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.JOIN_REORDER_RULES()) - .build(), - "do join reorder") - .build()); - } - - // project rewrite - chainedProgram.addLast( - PROJECT_REWRITE, - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_COLLECTION()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.PROJECT_RULES()) - .build()); - - // optimize the logical plan - chainedProgram.addLast( - LOGICAL, - FlinkVolcanoProgramBuilder.newBuilder() - .add(FlinkStreamRuleSets.LOGICAL_OPT_RULES()) - .setRequiredOutputTraits(new Convention.Impl[] {FlinkConventions.LOGICAL()}) - .build()); - - // logical rewrite - chainedProgram.addLast( - LOGICAL_REWRITE, - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.LOGICAL_REWRITE()) - .build()); - - return chainedProgram; - } -} diff --git a/dinky-client/dinky-client-1.14/src/main/java/org/dinky/utils/LineageContext.java b/dinky-client/dinky-client-1.14/src/main/java/org/dinky/utils/LineageContext.java index d8ca8d6944..577c515bc7 100644 --- a/dinky-client/dinky-client-1.14/src/main/java/org/dinky/utils/LineageContext.java +++ b/dinky-client/dinky-client-1.14/src/main/java/org/dinky/utils/LineageContext.java @@ -27,22 +27,13 @@ import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.commons.collections.CollectionUtils; import org.apache.flink.api.java.tuple.Tuple2; -import org.apache.flink.table.api.TableConfig; import 
org.apache.flink.table.api.TableException; import org.apache.flink.table.api.ValidationException; import org.apache.flink.table.api.internal.TableEnvironmentImpl; -import org.apache.flink.table.catalog.CatalogManager; -import org.apache.flink.table.catalog.FunctionCatalog; import org.apache.flink.table.operations.CatalogSinkModifyOperation; import org.apache.flink.table.operations.Operation; -import org.apache.flink.table.planner.calcite.FlinkRelBuilder; -import org.apache.flink.table.planner.calcite.SqlExprToRexConverterFactory; -import org.apache.flink.table.planner.delegation.PlannerBase; import org.apache.flink.table.planner.operations.PlannerQueryOperation; -import org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram; -import org.apache.flink.table.planner.plan.optimize.program.StreamOptimizeContext; import org.apache.flink.table.planner.plan.schema.TableSourceTable; -import org.apache.flink.table.planner.plan.trait.MiniBatchInterval; import java.util.ArrayList; import java.util.List; @@ -55,11 +46,9 @@ */ public class LineageContext { - private final FlinkChainedProgram flinkChainedProgram; private final TableEnvironmentImpl tableEnv; - public LineageContext(FlinkChainedProgram flinkChainedProgram, TableEnvironmentImpl tableEnv) { - this.flinkChainedProgram = flinkChainedProgram; + public LineageContext(TableEnvironmentImpl tableEnv) { this.tableEnv = tableEnv; } @@ -69,11 +58,8 @@ public List getLineage(String statement) { String sinkTable = parsed.getField(0); RelNode oriRelNode = parsed.getField(1); - // 2. Optimize original relNode to generate Optimized Logical Plan - RelNode optRelNode = optimize(oriRelNode); - - // 3. Build lineage based from RelMetadataQuery - return buildFiledLineageResult(sinkTable, optRelNode); + // 2. Build lineage based from RelMetadataQuery + return buildFiledLineageResult(sinkTable, oriRelNode); } private Tuple2 parseStatement(String sql) { @@ -94,66 +80,6 @@ private Tuple2 parseStatement(String sql) { } } - /** Calling each program's optimize method in sequence. 
*/ - private RelNode optimize(RelNode relNode) { - return flinkChainedProgram.optimize(relNode, new StreamOptimizeContext() { - - @Override - public boolean isBatchMode() { - return false; - } - - @Override - public TableConfig getTableConfig() { - return tableEnv.getConfig(); - } - - @Override - public FunctionCatalog getFunctionCatalog() { - return getPlanner().getFlinkContext().getFunctionCatalog(); - } - - @Override - public CatalogManager getCatalogManager() { - return tableEnv.getCatalogManager(); - } - - @Override - public SqlExprToRexConverterFactory getSqlExprToRexConverterFactory() { - return getPlanner().getFlinkContext().getSqlExprToRexConverterFactory(); - } - - @Override - public C unwrap(Class clazz) { - return getPlanner().getFlinkContext().unwrap(clazz); - } - - @Override - public FlinkRelBuilder getFlinkRelBuilder() { - return getPlanner().getRelBuilder(); - } - - @Override - public boolean needFinalTimeIndicatorConversion() { - return true; - } - - @Override - public boolean isUpdateBeforeRequired() { - return false; - } - - @Override - public MiniBatchInterval getMiniBatchInterval() { - return MiniBatchInterval.NONE; - } - - private PlannerBase getPlanner() { - return (PlannerBase) tableEnv.getPlanner(); - } - }); - } - /** Check the size of query and sink fields match */ private void validateSchema(String sinkTable, RelNode relNode, List sinkFieldList) { List queryFieldList = relNode.getRowType().getFieldNames(); @@ -197,7 +123,8 @@ private List buildFiledLineageResult(String sinkTable, RelNode optRe String sourceColumn = fieldNames.get(ordinal); // add record - resultList.add(LineageRel.build(sourceTable, sourceColumn, sinkTable, targetColumn)); + resultList.add(LineageRel.build( + sourceTable, sourceColumn, sinkTable, targetColumn, relColumnOrigin.getTransform())); } } } diff --git a/dinky-client/dinky-client-1.14/src/test/java/org/dinky/utils/LineageContextTest.java b/dinky-client/dinky-client-1.14/src/test/java/org/dinky/utils/LineageContextTest.java new file mode 100644 index 0000000000..fa22d6eec6 --- /dev/null +++ b/dinky-client/dinky-client-1.14/src/test/java/org/dinky/utils/LineageContextTest.java @@ -0,0 +1,112 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.dinky.utils; + +import static org.junit.Assert.assertEquals; + +import org.dinky.data.model.LineageRel; + +import org.apache.flink.configuration.Configuration; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.table.api.EnvironmentSettings; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; +import org.apache.flink.table.api.internal.TableEnvironmentImpl; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +/** + * @description: LineageContextTest + * @author: HamaWhite + */ +public class LineageContextTest { + + private static TableEnvironmentImpl tableEnv; + private static LineageContext context; + + @BeforeClass + public static void setUp() { + StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment(new Configuration()); + + EnvironmentSettings settings = + EnvironmentSettings.newInstance().inStreamingMode().build(); + tableEnv = (TableEnvironmentImpl) StreamTableEnvironment.create(env, settings); + + context = new LineageContext(tableEnv); + } + + @Before + public void init() { + // create table ST + tableEnv.executeSql("DROP TABLE IF EXISTS ST"); + tableEnv.executeSql("CREATE TABLE ST ( " + " a STRING ," + + " b STRING ," + + " c STRING " + + ") WITH ( " + + " 'connector' = 'datagen' ," + + " 'rows-per-second' = '1' " + + ")"); + + // create table TT + tableEnv.executeSql("DROP TABLE IF EXISTS TT"); + tableEnv.executeSql("CREATE TABLE TT ( " + " A STRING ," + + " B STRING " + + ") WITH ( " + + " 'connector' = 'print' " + + ")"); + } + + @Test + public void testGetLineage() { + List actualList = context.getLineage("INSERT INTO TT select a||c A ,b||c B from ST"); + String[][] expectedArray = { + {"ST", "a", "TT", "A", "||(a, c)"}, + {"ST", "c", "TT", "A", "||(a, c)"}, + {"ST", "b", "TT", "B", "||(b, c)"}, + {"ST", "c", "TT", "B", "||(b, c)"} + }; + + List expectedList = buildResult(expectedArray); + assertEquals(expectedList, actualList); + } + + private List buildResult(String[][] expectedArray) { + return Stream.of(expectedArray) + .map(e -> { + String transform = e.length == 5 ? e[4] : null; + return new LineageRel( + "default_catalog", + "default_database", + e[0], + e[1], + "default_catalog", + "default_database", + e[2], + e[3], + transform); + }) + .collect(Collectors.toList()); + } +} diff --git a/dinky-client/dinky-client-1.15/pom.xml b/dinky-client/dinky-client-1.15/pom.xml index d614979679..003f87ff90 100644 --- a/dinky-client/dinky-client-1.15/pom.xml +++ b/dinky-client/dinky-client-1.15/pom.xml @@ -67,6 +67,11 @@ activation 1.1.1 + + junit + junit + test + diff --git a/dinky-client/dinky-client-1.15/src/main/java/org/apache/calcite/rel/metadata/RelColumnOrigin.java b/dinky-client/dinky-client-1.15/src/main/java/org/apache/calcite/rel/metadata/RelColumnOrigin.java new file mode 100644 index 0000000000..5fc8dc24cb --- /dev/null +++ b/dinky-client/dinky-client-1.15/src/main/java/org/apache/calcite/rel/metadata/RelColumnOrigin.java @@ -0,0 +1,116 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.apache.calcite.rel.metadata; + +import org.apache.calcite.plan.RelOptTable; + +/** + * Modified based on calcite's source code org.apache.calcite.rel.metadata.RelColumnOrigin + *

      + * Modification point:
      + *   1. add transform field and related code.
      + * + * @description: RelColumnOrigin is a data structure describing one of the origins of an + * output column produced by a relational expression. + * @author: HamaWhite + */ +public class RelColumnOrigin { + // ~ Instance fields -------------------------------------------------------- + + private final RelOptTable originTable; + + private final int iOriginColumn; + + private final boolean isDerived; + + /** + * Stores the expression for data conversion, + * which source table fields are transformed by which expression the target field + */ + private String transform; + + // ~ Constructors ----------------------------------------------------------- + + public RelColumnOrigin(RelOptTable originTable, int iOriginColumn, boolean isDerived) { + this.originTable = originTable; + this.iOriginColumn = iOriginColumn; + this.isDerived = isDerived; + } + + public RelColumnOrigin(RelOptTable originTable, int iOriginColumn, boolean isDerived, String transform) { + this.originTable = originTable; + this.iOriginColumn = iOriginColumn; + this.isDerived = isDerived; + this.transform = transform; + } + + // ~ Methods ---------------------------------------------------------------- + + /** + * Returns table of origin. + */ + public RelOptTable getOriginTable() { + return originTable; + } + + /** + * Returns the 0-based index of column in origin table; whether this ordinal + * is flattened or unflattened depends on whether UDT flattening has already + * been performed on the relational expression which produced this + * description. + */ + public int getOriginColumnOrdinal() { + return iOriginColumn; + } + + /** + * Consider the query select a+b as c, d as e from t. The + * output column c has two origins (a and b), both of them derived. The + * output column d as one origin (c), which is not derived. + * + * @return false if value taken directly from column in origin table; true + * otherwise + */ + public boolean isDerived() { + return isDerived; + } + + public String getTransform() { + return transform; + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof RelColumnOrigin)) { + return false; + } + RelColumnOrigin other = (RelColumnOrigin) obj; + return originTable.getQualifiedName().equals(other.originTable.getQualifiedName()) + && (iOriginColumn == other.iOriginColumn) + && (isDerived == other.isDerived); + } + + @Override + public int hashCode() { + return originTable.getQualifiedName().hashCode() + iOriginColumn + (isDerived ? 
313 : 0); + } +} diff --git a/dinky-client/dinky-client-1.15/src/main/java/org/apache/calcite/rel/metadata/RelMdColumnOrigins.java b/dinky-client/dinky-client-1.15/src/main/java/org/apache/calcite/rel/metadata/RelMdColumnOrigins.java index 6bdad4d186..5c8aae002a 100644 --- a/dinky-client/dinky-client-1.15/src/main/java/org/apache/calcite/rel/metadata/RelMdColumnOrigins.java +++ b/dinky-client/dinky-client-1.15/src/main/java/org/apache/calcite/rel/metadata/RelMdColumnOrigins.java @@ -36,7 +36,7 @@ import org.apache.calcite.rel.core.Sort; import org.apache.calcite.rel.core.TableFunctionScan; import org.apache.calcite.rel.core.TableModify; -import org.apache.calcite.rel.core.Window; +import org.apache.calcite.rel.core.TableScan; import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.rex.RexCall; import org.apache.calcite.rex.RexFieldAccess; @@ -48,33 +48,47 @@ import org.apache.calcite.rex.RexVisitor; import org.apache.calcite.rex.RexVisitorImpl; import org.apache.calcite.util.BuiltInMethod; -import org.apache.flink.table.planner.plan.schema.TableSourceTable; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; +import java.util.Map; import java.util.Set; +import java.util.function.Function; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Modified based on calcite's source code org.apache.calcite.rel.metadata.RelMdColumnOrigins * - *

      Modification point: 1. Support lookup join, add method getColumnOrigins(Snapshot - * rel,RelMetadataQuery mq, int iOutputColumn) 2. Support watermark, add method - * getColumnOrigins(SingleRel rel,RelMetadataQuery mq, int iOutputColumn) 3. Support table function, - * add method getColumnOrigins(Correlate rel, RelMetadataQuery mq, int iOutputColumn) 4. Support - * field AS LOCALTIMESTAMP, modify method getColumnOrigins(Calc rel, RelMetadataQuery mq, int - * iOutputColumn) 5. Support CEP, add method getColumnOrigins(Match rel, RelMetadataQuery mq, int - * iOutputColumn) 6. Support ROW_NUMBER(), add method getColumnOrigins(Window rel, RelMetadataQuery - * mq, int iOutputColumn)* + *

      Modification point:
      1. Support lookup join, add method getColumnOrigins(Snapshot rel,RelMetadataQuery mq, int iOutputColumn) + *
      2. Support watermark, add method getColumnOrigins(SingleRel rel,RelMetadataQuery mq, int iOutputColumn) + *
      3. Support table function, add method getColumnOrigins(Correlate rel, RelMetadataQuery mq, int iOutputColumn) + *
      4. Support CEP, add method getColumnOrigins(Match rel, RelMetadataQuery mq, int iOutputColumn) + *
      5. Support transform, add method createDerivedColumnOrigins(Set inputSet, String transform, boolean originTransform), and related code + *
      6. Support field AS LOCALTIMESTAMP, modify method getColumnOrigins(Project rel, RelMetadataQuery mq, int iOutputColumn) + *
      7. Support PROCTIME() as the first field, add method computeIndexWithOffset, used by getColumnOrigins(Project rel, RelMetadataQuery mq, int iOutputColumn) + *
          * - * @description: RelMdColumnOrigins supplies a default implementation of {@link - * RelMetadataQuery#getColumnOrigins} for the standard logical algebra. - * @version: 1.0.0 + * @description: RelMdColumnOrigins supplies a default implementation of {@link RelMetadataQuery#getColumnOrigins} for the standard logical algebra. + * @author: HamaWhite */ public class RelMdColumnOrigins implements MetadataHandler { + private static final Logger LOG = LoggerFactory.getLogger(RelMdColumnOrigins.class); + + public static final String DELIMITER = "."; + public static final RelMetadataProvider SOURCE = ReflectiveRelMetadataProvider.reflectiveSource( BuiltInMethod.COLUMN_ORIGIN.method, new RelMdColumnOrigins()); @@ -98,10 +112,10 @@ public Set getColumnOrigins(Aggregate rel, RelMetadataQuery mq, // Aggregate columns are derived from input columns AggregateCall call = rel.getAggCallList().get(iOutputColumn - rel.getGroupCount()); - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); for (Integer iInput : call.getArgList()) { Set inputSet = mq.getColumnOrigins(rel.getInput(), iInput); - inputSet = createDerivedColumnOrigins(inputSet); + inputSet = createDerivedColumnOrigins(inputSet, call.toString(), true); if (inputSet != null) { set.addAll(inputSet); } @@ -132,7 +146,9 @@ public Set getColumnOrigins(Join rel, RelMetadataQuery mq, int return set; } - /** Support the field blood relationship of table function */ + /** + * Support the field blood relationship of table function + */ public Set getColumnOrigins(Correlate rel, RelMetadataQuery mq, int iOutputColumn) { List leftFieldList = rel.getLeft().getRowType().getFieldList(); @@ -142,68 +158,126 @@ public Set getColumnOrigins(Correlate rel, RelMetadataQuery mq, if (iOutputColumn < nLeftColumns) { set = mq.getColumnOrigins(rel.getLeft(), iOutputColumn); } else { - // get the field name of the left table configured in the Table Function on the right - TableFunctionScan tableFunctionScan = (TableFunctionScan) rel.getRight(); - RexCall rexCall = (RexCall) tableFunctionScan.getCall(); - // support only one field in table function - RexFieldAccess rexFieldAccess = (RexFieldAccess) rexCall.operands.get(0); - String fieldName = rexFieldAccess.getField().getName(); - - int leftFieldIndex = 0; - for (int i = 0; i < nLeftColumns; i++) { - if (leftFieldList.get(i).getName().equalsIgnoreCase(fieldName)) { - leftFieldIndex = i; - break; + if (rel.getRight() instanceof TableFunctionScan) { + // get the field name of the left table configured in the Table Function on the right + TableFunctionScan tableFunctionScan = (TableFunctionScan) rel.getRight(); + RexCall rexCall = (RexCall) tableFunctionScan.getCall(); + // support only one field in table function + RexFieldAccess rexFieldAccess = + (RexFieldAccess) rexCall.getOperands().get(0); + String fieldName = rexFieldAccess.getField().getName(); + + int leftFieldIndex = 0; + for (int i = 0; i < nLeftColumns; i++) { + if (leftFieldList.get(i).getName().equalsIgnoreCase(fieldName)) { + leftFieldIndex = i; + break; + } } + /** + * Get the fields from the left table, don't go to + * getColumnOrigins(TableFunctionScan rel,RelMetadataQuery mq, int iOutputColumn), + * otherwise the return is null, and the UDTF field origin cannot be parsed + */ + set = mq.getColumnOrigins(rel.getLeft(), leftFieldIndex); + + // process transform for udtf + String transform = rexCall.toString().replace(rexFieldAccess.toString(), fieldName) + + DELIMITER + + 
tableFunctionScan.getRowType().getFieldNames().get(iOutputColumn - nLeftColumns); + set = createDerivedColumnOrigins(set, transform, false); + } else { + set = mq.getColumnOrigins(rel.getRight(), iOutputColumn - nLeftColumns); } - /** - * Get the fields from the left table, don't go to getColumnOrigins(TableFunctionScan - * rel,RelMetadataQuery mq, int iOutputColumn), otherwise the return is null, and the - * UDTF field origin cannot be parsed - */ - set = mq.getColumnOrigins(rel.getLeft(), leftFieldIndex); } return set; } public Set getColumnOrigins(SetOp rel, RelMetadataQuery mq, int iOutputColumn) { - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); for (RelNode input : rel.getInputs()) { Set inputSet = mq.getColumnOrigins(input, iOutputColumn); if (inputSet == null) { - return null; + return Collections.emptySet(); } set.addAll(inputSet); } return set; } - /** Support the field blood relationship of lookup join */ + /** + * Support the field blood relationship of lookup join + */ public Set getColumnOrigins(Snapshot rel, RelMetadataQuery mq, int iOutputColumn) { return mq.getColumnOrigins(rel.getInput(), iOutputColumn); } - /** Support the field blood relationship of watermark */ + /** + * Support the field blood relationship of watermark + */ public Set getColumnOrigins(SingleRel rel, RelMetadataQuery mq, int iOutputColumn) { return mq.getColumnOrigins(rel.getInput(), iOutputColumn); } /** - * Support field blood relationship of CEP. The first column is the field after PARTITION BY, - * and the other columns come from the measures in Match + * Support for new fields in the source table similar to those created with the LOCALTIMESTAMP function + */ + public Set getColumnOrigins(Project rel, final RelMetadataQuery mq, int iOutputColumn) { + final RelNode input = rel.getInput(); + RexNode rexNode = rel.getProjects().get(iOutputColumn); + + if (rexNode instanceof RexInputRef) { + // Direct reference: no derivation added. + RexInputRef inputRef = (RexInputRef) rexNode; + int index = inputRef.getIndex(); + if (input instanceof TableScan) { + index = computeIndexWithOffset(rel.getProjects(), inputRef.getIndex(), iOutputColumn); + } + return mq.getColumnOrigins(input, index); + } else if (input instanceof TableScan + && rexNode.getClass().equals(RexCall.class) + && ((RexCall) rexNode).getOperands().isEmpty()) { + return mq.getColumnOrigins(input, iOutputColumn); + } + // Anything else is a derivation, possibly from multiple columns. + final Set set = getMultipleColumns(rexNode, input, mq); + return createDerivedColumnOrigins(set, rexNode.toString(), true); + } + + private int computeIndexWithOffset(List projects, int baseIndex, int iOutputColumn) { + int offset = 0; + for (int index = 0; index < iOutputColumn; index++) { + RexNode rexNode = projects.get(index); + if ((rexNode.getClass().equals(RexCall.class) + && ((RexCall) rexNode).getOperands().isEmpty())) { + offset += 1; + } + } + return baseIndex + offset; + } + + /** + * Support field blood relationship of CEP. 
+ * The first column is the field after PARTITION BY, and the other columns come from the measures in Match */ public Set getColumnOrigins(Match rel, RelMetadataQuery mq, int iOutputColumn) { - if (iOutputColumn == 0) { + int orderCount = rel.getOrderKeys().getKeys().size(); + + if (iOutputColumn < orderCount) { return mq.getColumnOrigins(rel.getInput(), iOutputColumn); } final RelNode input = rel.getInput(); - RexNode rexNode = rel.getMeasures().values().asList().get(iOutputColumn - 1); + RexNode rexNode = rel.getMeasures().values().asList().get(iOutputColumn - orderCount); RexPatternFieldRef rexPatternFieldRef = searchRexPatternFieldRef(rexNode); if (rexPatternFieldRef != null) { - return mq.getColumnOrigins(input, rexPatternFieldRef.getIndex()); + final Set set = mq.getColumnOrigins(input, rexPatternFieldRef.getIndex()); + String originTransform = rexNode instanceof RexCall + ? ((RexCall) rexNode).getOperands().get(0).toString() + : null; + return createDerivedColumnOrigins(set, originTransform, true); } - return null; + return Collections.emptySet(); } private RexPatternFieldRef searchRexPatternFieldRef(RexNode rexNode) { @@ -219,46 +293,6 @@ private RexPatternFieldRef searchRexPatternFieldRef(RexNode rexNode) { return null; } - /** Support the field blood relationship of ROW_NUMBER() */ - public Set getColumnOrigins(Window rel, RelMetadataQuery mq, int iOutputColumn) { - final RelNode input = rel.getInput(); - /** - * Haven't found a good way to judge whether the field comes from window, for the time - * being, first judge by parsing the string - */ - String fieldName = rel.getRowType().getFieldNames().get(iOutputColumn); - // for example: "w1$o0" - if (fieldName.startsWith("w") && fieldName.contains("$")) { - int groupIndex = Integer.parseInt(fieldName.substring(1, fieldName.indexOf("$"))); - final Set set = new LinkedHashSet<>(); - if (!rel.groups.isEmpty()) { - Window.Group group = rel.groups.get(groupIndex); - // process partition by keys - group.keys.asList().forEach(index -> set.addAll(mq.getColumnOrigins(input, index))); - // process order by keys - group.orderKeys - .getFieldCollations() - .forEach(e -> set.addAll(mq.getColumnOrigins(input, e.getFieldIndex()))); - } - return set; - } - return mq.getColumnOrigins(rel.getInput(), iOutputColumn); - } - - public Set getColumnOrigins(Project rel, final RelMetadataQuery mq, int iOutputColumn) { - final RelNode input = rel.getInput(); - RexNode rexNode = rel.getProjects().get(iOutputColumn); - - if (rexNode instanceof RexInputRef) { - // Direct reference: no derivation added. - RexInputRef inputRef = (RexInputRef) rexNode; - return mq.getColumnOrigins(input, inputRef.getIndex()); - } - // Anything else is a derivation, possibly from multiple columns. - final Set set = getMultipleColumns(rexNode, input, mq); - return createDerivedColumnOrigins(set); - } - public Set getColumnOrigins(Calc rel, final RelMetadataQuery mq, int iOutputColumn) { final RelNode input = rel.getInput(); final RexShuttle rexShuttle = new RexShuttle() { @@ -277,30 +311,6 @@ public RexNode visitLocalRef(RexLocalRef localRef) { // Direct reference: no derivation added. 
RexInputRef inputRef = (RexInputRef) rexNode; return mq.getColumnOrigins(input, inputRef.getIndex()); - } else if (rexNode instanceof RexCall && ((RexCall) rexNode).operands.isEmpty()) { - // support for new fields in the source table similar to those created with the - // LOCALTIMESTAMP function - TableSourceTable table = ((TableSourceTable) rel.getInput().getTable()); - if (table != null) { - String targetFieldName = rel.getProgram() - .getOutputRowType() - .getFieldList() - .get(iOutputColumn) - .getName(); - List fieldList = - table.contextResolvedTable().getResolvedSchema().getColumnNames(); - - int index = -1; - for (int i = 0; i < fieldList.size(); i++) { - if (fieldList.get(i).equalsIgnoreCase(targetFieldName)) { - index = i; - break; - } - } - if (index != -1) { - return Collections.singleton(new RelColumnOrigin(table, index, false)); - } - } } // Anything else is a derivation, possibly from multiple columns. final Set set = getMultipleColumns(rexNode, input, mq); @@ -324,14 +334,14 @@ public Set getColumnOrigins(Exchange rel, RelMetadataQuery mq, } public Set getColumnOrigins(TableFunctionScan rel, RelMetadataQuery mq, int iOutputColumn) { - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); Set mappings = rel.getColumnMappings(); if (mappings == null) { - if (rel.getInputs().size() > 0) { + if (!rel.getInputs().isEmpty()) { // This is a non-leaf transformation: say we don't // know about origins, because there are probably // columns below. - return null; + return Collections.emptySet(); } else { // This is a leaf transformation: say there are fer sure no // column origins. @@ -346,7 +356,7 @@ public Set getColumnOrigins(TableFunctionScan rel, RelMetadataQ final int column = mapping.iInputColumn; Set origins = mq.getColumnOrigins(input, column); if (origins == null) { - return null; + return Collections.emptySet(); } if (mapping.derived) { origins = createDerivedColumnOrigins(origins); @@ -357,18 +367,19 @@ public Set getColumnOrigins(TableFunctionScan rel, RelMetadataQ } // Catch-all rule when none of the others apply. + @SuppressWarnings("squid:S1172") public Set getColumnOrigins(RelNode rel, RelMetadataQuery mq, int iOutputColumn) { // NOTE jvs 28-Mar-2006: We may get this wrong for a physical table // expression which supports projections. In that case, // it's up to the plugin writer to override with the // correct information. - if (rel.getInputs().size() > 0) { + if (!rel.getInputs().isEmpty()) { // No generic logic available for non-leaf rels. - return null; + return Collections.emptySet(); } - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); RelOptTable table = rel.getTable(); if (table == null) { @@ -383,7 +394,7 @@ public Set getColumnOrigins(RelNode rel, RelMetadataQuery mq, i // names.) This detection assumes the table expression doesn't handle // rename as well. 
if (table.getRowType() != rel.getRowType()) { - return null; + return Collections.emptySet(); } set.add(new RelColumnOrigin(table, iOutputColumn, false)); @@ -392,9 +403,9 @@ public Set getColumnOrigins(RelNode rel, RelMetadataQuery mq, i private Set createDerivedColumnOrigins(Set inputSet) { if (inputSet == null) { - return null; + return Collections.emptySet(); } - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); for (RelColumnOrigin rco : inputSet) { RelColumnOrigin derived = new RelColumnOrigin(rco.getOriginTable(), rco.getOriginColumnOrdinal(), true); set.add(derived); @@ -402,10 +413,113 @@ private Set createDerivedColumnOrigins(Set inp return set; } + private Set createDerivedColumnOrigins( + Set inputSet, String transform, boolean originTransform) { + if (inputSet == null || inputSet.isEmpty()) { + return Collections.emptySet(); + } + final Set set = new LinkedHashSet<>(); + + String finalTransform = originTransform ? computeTransform(inputSet, transform) : transform; + for (RelColumnOrigin rco : inputSet) { + RelColumnOrigin derived = + new RelColumnOrigin(rco.getOriginTable(), rco.getOriginColumnOrdinal(), true, finalTransform); + set.add(derived); + } + return set; + } + + /** + * Replace the variable at the beginning of $ in input with the real field information + */ + private String computeTransform(Set inputSet, String transform) { + LOG.debug("origin transform: {}", transform); + Pattern pattern = Pattern.compile("\\$\\d+"); + Matcher matcher = pattern.matcher(transform); + + Set operandSet = new LinkedHashSet<>(); + while (matcher.find()) { + operandSet.add(matcher.group()); + } + + if (operandSet.isEmpty()) { + LOG.info("operandSet is empty"); + return null; + } + if (inputSet.size() != operandSet.size()) { + LOG.warn( + "The number [{}] of fields in the source tables are not equal to operands [{}]", + inputSet.size(), + operandSet.size()); + return null; + } + + Map sourceColumnMap = new HashMap<>(); + Iterator iterator = optimizeSourceColumnSet(inputSet).iterator(); + operandSet.forEach(e -> sourceColumnMap.put(e, iterator.next())); + LOG.debug("sourceColumnMap: {}", sourceColumnMap); + + matcher = pattern.matcher(transform); + String temp; + while (matcher.find()) { + temp = matcher.group(); + transform = transform.replace(temp, sourceColumnMap.get(temp)); + } + + // temporary special treatment + transform = transform.replace("_UTF-16LE", ""); + LOG.debug("transform: {}", transform); + return transform; + } + + /** + * Increase the readability of transform. + * if catalog, database and table are the same, return field. + * If the catalog and database are the same, return the table and field. + * If the catalog is the same, return the database, table, field. + * Otherwise, return all + */ + private Set optimizeSourceColumnSet(Set inputSet) { + Set catalogSet = new HashSet<>(); + Set databaseSet = new HashSet<>(); + Set tableSet = new HashSet<>(); + Set> qualifiedSet = new LinkedHashSet<>(); + for (RelColumnOrigin rco : inputSet) { + RelOptTable originTable = rco.getOriginTable(); + List qualifiedName = originTable.getQualifiedName(); + + // catalog,database,table,field + List qualifiedList = new ArrayList<>(qualifiedName); + catalogSet.add(qualifiedName.get(0)); + databaseSet.add(qualifiedName.get(1)); + tableSet.add(qualifiedName.get(2)); + + String field = rco.getTransform() != null + ? 
rco.getTransform() + : originTable.getRowType().getFieldNames().get(rco.getOriginColumnOrdinal()); + qualifiedList.add(field); + qualifiedSet.add(qualifiedList); + } + if (catalogSet.size() == 1 && databaseSet.size() == 1 && tableSet.size() == 1) { + return optimizeName(qualifiedSet, e -> e.get(3)); + } else if (catalogSet.size() == 1 && databaseSet.size() == 1) { + return optimizeName(qualifiedSet, e -> String.join(DELIMITER, e.subList(2, 4))); + } else if (catalogSet.size() == 1) { + return optimizeName(qualifiedSet, e -> String.join(DELIMITER, e.subList(1, 4))); + } else { + return optimizeName(qualifiedSet, e -> String.join(DELIMITER, e)); + } + } + + private Set optimizeName(Set> qualifiedSet, Function, String> mapper) { + return qualifiedSet.stream().map(mapper).collect(Collectors.toSet()); + } + private Set getMultipleColumns(RexNode rexNode, RelNode input, final RelMetadataQuery mq) { - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); final RexVisitor visitor = new RexVisitorImpl(true) { + @Override public Void visitInputRef(RexInputRef inputRef) { Set inputSet = mq.getColumnOrigins(input, inputRef.getIndex()); if (inputSet != null) { diff --git a/dinky-client/dinky-client-1.15/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java b/dinky-client/dinky-client-1.15/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java index 23eb46eadb..a3320dd770 100644 --- a/dinky-client/dinky-client-1.15/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java +++ b/dinky-client/dinky-client-1.15/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java @@ -24,7 +24,6 @@ import org.dinky.assertion.Asserts; import org.dinky.data.model.LineageRel; import org.dinky.data.result.SqlExplainResult; -import org.dinky.utils.FlinkStreamProgramWithoutPhysical; import org.dinky.utils.LineageContext; import org.apache.flink.api.common.RuntimeExecutionMode; @@ -64,7 +63,6 @@ import org.apache.flink.table.operations.command.SetOperation; import org.apache.flink.table.operations.ddl.CreateTableASOperation; import org.apache.flink.table.operations.ddl.CreateTableOperation; -import org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram; import org.apache.flink.types.Row; import java.util.ArrayList; @@ -85,8 +83,6 @@ */ public class CustomTableEnvironmentImpl extends AbstractCustomTableEnvironment { - private final FlinkChainedProgram flinkChainedProgram; - public CustomTableEnvironmentImpl( CatalogManager catalogManager, ModuleManager moduleManager, @@ -107,8 +103,6 @@ public CustomTableEnvironmentImpl( executor, isStreamingMode, userClassLoader)); - this.flinkChainedProgram = FlinkStreamProgramWithoutPhysical.buildProgram( - (Configuration) getStreamExecutionEnvironment().getConfiguration()); } public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment) { @@ -324,8 +318,7 @@ public void createTemporaryView(String path, DataStream dataStream, Strin @Override public List getLineage(String statement) { - LineageContext lineageContext = - new LineageContext(flinkChainedProgram, (TableEnvironmentImpl) streamTableEnvironment); + LineageContext lineageContext = new LineageContext((TableEnvironmentImpl) streamTableEnvironment); return lineageContext.getLineage(statement); } diff --git a/dinky-client/dinky-client-1.15/src/main/java/org/dinky/utils/FlinkStreamProgramWithoutPhysical.java b/dinky-client/dinky-client-1.15/src/main/java/org/dinky/utils/FlinkStreamProgramWithoutPhysical.java deleted file mode 
100644 index bf4808939d..0000000000 --- a/dinky-client/dinky-client-1.15/src/main/java/org/dinky/utils/FlinkStreamProgramWithoutPhysical.java +++ /dev/null @@ -1,246 +0,0 @@ -/* - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package org.dinky.utils; - -import org.apache.calcite.plan.Convention; -import org.apache.calcite.plan.hep.HepMatchOrder; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.table.api.config.OptimizerConfigOptions; -import org.apache.flink.table.planner.plan.nodes.FlinkConventions; -import org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram; -import org.apache.flink.table.planner.plan.optimize.program.FlinkDecorrelateProgram; -import org.apache.flink.table.planner.plan.optimize.program.FlinkGroupProgramBuilder; -import org.apache.flink.table.planner.plan.optimize.program.FlinkHepRuleSetProgramBuilder; -import org.apache.flink.table.planner.plan.optimize.program.FlinkVolcanoProgramBuilder; -import org.apache.flink.table.planner.plan.optimize.program.HEP_RULES_EXECUTION_TYPE; -import org.apache.flink.table.planner.plan.rules.FlinkStreamRuleSets; - -/** - * FlinkStreamProgramWithoutPhysical - * - * @since 2022/11/22 - */ -public class FlinkStreamProgramWithoutPhysical { - - private static final String SUBQUERY_REWRITE = "subquery_rewrite"; - private static final String TEMPORAL_JOIN_REWRITE = "temporal_join_rewrite"; - private static final String DECORRELATE = "decorrelate"; - private static final String DEFAULT_REWRITE = "default_rewrite"; - private static final String PREDICATE_PUSHDOWN = "predicate_pushdown"; - private static final String JOIN_REORDER = "join_reorder"; - private static final String PROJECT_REWRITE = "project_rewrite"; - private static final String LOGICAL = "logical"; - private static final String LOGICAL_REWRITE = "logical_rewrite"; - - public static FlinkChainedProgram buildProgram(Configuration config) { - FlinkChainedProgram chainedProgram = new FlinkChainedProgram(); - - // rewrite sub-queries to joins - chainedProgram.addLast( - SUBQUERY_REWRITE, - FlinkGroupProgramBuilder.newBuilder() - // rewrite QueryOperationCatalogViewTable before rewriting sub-queries - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.TABLE_REF_RULES()) - .build(), - "convert table references before rewriting sub-queries to" + " semi-join") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.SEMI_JOIN_RULES()) - .build(), - "rewrite sub-queries to semi-join") - .addProgram( 
- FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_COLLECTION()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.TABLE_SUBQUERY_RULES()) - .build(), - "sub-queries remove") - // convert RelOptTableImpl (which exists in SubQuery before) to - // FlinkRelOptTable - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.TABLE_REF_RULES()) - .build(), - "convert table references after sub-queries removed") - .build()); - - // rewrite special temporal join plan - chainedProgram.addLast( - TEMPORAL_JOIN_REWRITE, - FlinkGroupProgramBuilder.newBuilder() - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.EXPAND_PLAN_RULES()) - .build(), - "convert correlate to temporal table join") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.POST_EXPAND_CLEAN_UP_RULES()) - .build(), - "convert enumerable table scan") - .build()); - - // query decorrelation - chainedProgram.addLast( - DECORRELATE, - FlinkGroupProgramBuilder.newBuilder() - // rewrite before decorrelation - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.PRE_DECORRELATION_RULES()) - .build(), - "pre-rewrite before decorrelation") - .addProgram(new FlinkDecorrelateProgram(), "") - .build()); - - // default rewrite, includes: predicate simplification, expression reduction, window - // properties rewrite, etc. 
- chainedProgram.addLast( - DEFAULT_REWRITE, - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.DEFAULT_REWRITE_RULES()) - .build()); - - // rule based optimization: push down predicate(s) in where clause, so it only needs to read - // the required data - chainedProgram.addLast( - PREDICATE_PUSHDOWN, - FlinkGroupProgramBuilder.newBuilder() - .addProgram( - FlinkGroupProgramBuilder.newBuilder() - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType( - HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.JOIN_PREDICATE_REWRITE_RULES()) - .build(), - "join predicate rewrite") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType( - HEP_RULES_EXECUTION_TYPE.RULE_COLLECTION()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.FILTER_PREPARE_RULES()) - .build(), - "filter rules") - .setIterations(5) - .build(), - "predicate rewrite") - .addProgram( - // PUSH_PARTITION_DOWN_RULES should always be in front of - // PUSH_FILTER_DOWN_RULES - // to prevent PUSH_FILTER_DOWN_RULES from consuming the predicates - // in partitions - FlinkGroupProgramBuilder.newBuilder() - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType( - HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.PUSH_PARTITION_DOWN_RULES()) - .build(), - "push down partitions into table scan") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType( - HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.PUSH_FILTER_DOWN_RULES()) - .build(), - "push down filters into table scan") - .build(), - "push predicate into table scan") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.PRUNE_EMPTY_RULES()) - .build(), - "prune empty after predicate push down") - .build()); - - // join reorder - if (config.getBoolean(OptimizerConfigOptions.TABLE_OPTIMIZER_JOIN_REORDER_ENABLED)) { - chainedProgram.addLast( - JOIN_REORDER, - FlinkGroupProgramBuilder.newBuilder() - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_COLLECTION()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.JOIN_REORDER_PREPARE_RULES()) - .build(), - "merge join into MultiJoin") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.JOIN_REORDER_RULES()) - .build(), - "do join reorder") - .build()); - } - - // project rewrite - chainedProgram.addLast( - PROJECT_REWRITE, - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_COLLECTION()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.PROJECT_RULES()) - .build()); - - // optimize the logical plan - chainedProgram.addLast( - LOGICAL, - FlinkVolcanoProgramBuilder.newBuilder() - .add(FlinkStreamRuleSets.LOGICAL_OPT_RULES()) - .setRequiredOutputTraits(new Convention.Impl[] {FlinkConventions.LOGICAL()}) - .build()); - - // 
logical rewrite - chainedProgram.addLast( - LOGICAL_REWRITE, - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.LOGICAL_REWRITE()) - .build()); - - return chainedProgram; - } -} diff --git a/dinky-client/dinky-client-1.15/src/main/java/org/dinky/utils/LineageContext.java b/dinky-client/dinky-client-1.15/src/main/java/org/dinky/utils/LineageContext.java index 407ca3f78e..d707ade42a 100644 --- a/dinky-client/dinky-client-1.15/src/main/java/org/dinky/utils/LineageContext.java +++ b/dinky-client/dinky-client-1.15/src/main/java/org/dinky/utils/LineageContext.java @@ -27,23 +27,13 @@ import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.commons.collections.CollectionUtils; import org.apache.flink.api.java.tuple.Tuple2; -import org.apache.flink.table.api.TableConfig; import org.apache.flink.table.api.TableException; import org.apache.flink.table.api.ValidationException; import org.apache.flink.table.api.internal.TableEnvironmentImpl; -import org.apache.flink.table.catalog.CatalogManager; -import org.apache.flink.table.catalog.FunctionCatalog; -import org.apache.flink.table.module.ModuleManager; import org.apache.flink.table.operations.Operation; import org.apache.flink.table.operations.SinkModifyOperation; -import org.apache.flink.table.planner.calcite.FlinkRelBuilder; -import org.apache.flink.table.planner.calcite.SqlExprToRexConverterFactory; -import org.apache.flink.table.planner.delegation.PlannerBase; import org.apache.flink.table.planner.operations.PlannerQueryOperation; -import org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram; -import org.apache.flink.table.planner.plan.optimize.program.StreamOptimizeContext; import org.apache.flink.table.planner.plan.schema.TableSourceTable; -import org.apache.flink.table.planner.plan.trait.MiniBatchInterval; import java.util.ArrayList; import java.util.List; @@ -56,11 +46,9 @@ */ public class LineageContext { - private final FlinkChainedProgram flinkChainedProgram; private final TableEnvironmentImpl tableEnv; - public LineageContext(FlinkChainedProgram flinkChainedProgram, TableEnvironmentImpl tableEnv) { - this.flinkChainedProgram = flinkChainedProgram; + public LineageContext(TableEnvironmentImpl tableEnv) { this.tableEnv = tableEnv; } @@ -70,11 +58,8 @@ public List getLineage(String statement) { String sinkTable = parsed.getField(0); RelNode oriRelNode = parsed.getField(1); - // 2. Optimize original relNode to generate Optimized Logical Plan - RelNode optRelNode = optimize(oriRelNode); - - // 3. Build lineage based from RelMetadataQuery - return buildFiledLineageResult(sinkTable, optRelNode); + // 2. Build lineage based from RelMetadataQuery + return buildFiledLineageResult(sinkTable, oriRelNode); } private Tuple2 parseStatement(String sql) { @@ -96,76 +81,6 @@ private Tuple2 parseStatement(String sql) { } } - /** Calling each program's optimize method in sequence. 
*/ - private RelNode optimize(RelNode relNode) { - return flinkChainedProgram.optimize(relNode, new StreamOptimizeContext() { - - @Override - public boolean isBatchMode() { - return false; - } - - @Override - public TableConfig getTableConfig() { - return tableEnv.getConfig(); - } - - @Override - public FunctionCatalog getFunctionCatalog() { - return getPlanner().getFlinkContext().getFunctionCatalog(); - } - - @Override - public CatalogManager getCatalogManager() { - return tableEnv.getCatalogManager(); - } - - @Override - public ModuleManager getModuleManager() { - return getPlanner().getFlinkContext().getModuleManager(); - } - - @Override - public SqlExprToRexConverterFactory getSqlExprToRexConverterFactory() { - return null; - } - - @Override - public C unwrap(Class clazz) { - return getPlanner().getFlinkContext().unwrap(clazz); - } - - @Override - public FlinkRelBuilder getFlinkRelBuilder() { - return getPlanner().getRelBuilder(); - } - - @Override - public boolean isUpdateBeforeRequired() { - return false; - } - - @Override - public MiniBatchInterval getMiniBatchInterval() { - return MiniBatchInterval.NONE; - } - - @Override - public boolean needFinalTimeIndicatorConversion() { - return true; - } - - @Override - public ClassLoader getClassLoader() { - return getPlanner().getFlinkContext().getClassLoader(); - } - - private PlannerBase getPlanner() { - return (PlannerBase) tableEnv.getPlanner(); - } - }); - } - /** Check the size of query and sink fields match */ private void validateSchema(String sinkTable, RelNode relNode, List sinkFieldList) { List queryFieldList = relNode.getRowType().getFieldNames(); @@ -209,7 +124,8 @@ private List buildFiledLineageResult(String sinkTable, RelNode optRe String sourceColumn = fieldNames.get(ordinal); // add record - resultList.add(LineageRel.build(sourceTable, sourceColumn, sinkTable, targetColumn)); + resultList.add(LineageRel.build( + sourceTable, sourceColumn, sinkTable, targetColumn, relColumnOrigin.getTransform())); } } } diff --git a/dinky-client/dinky-client-1.15/src/test/java/org/dinky/utils/LineageContextTest.java b/dinky-client/dinky-client-1.15/src/test/java/org/dinky/utils/LineageContextTest.java new file mode 100644 index 0000000000..fa22d6eec6 --- /dev/null +++ b/dinky-client/dinky-client-1.15/src/test/java/org/dinky/utils/LineageContextTest.java @@ -0,0 +1,112 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.dinky.utils; + +import static org.junit.Assert.assertEquals; + +import org.dinky.data.model.LineageRel; + +import org.apache.flink.configuration.Configuration; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.table.api.EnvironmentSettings; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; +import org.apache.flink.table.api.internal.TableEnvironmentImpl; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +/** + * @description: LineageContextTest + * @author: HamaWhite + */ +public class LineageContextTest { + + private static TableEnvironmentImpl tableEnv; + private static LineageContext context; + + @BeforeClass + public static void setUp() { + StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment(new Configuration()); + + EnvironmentSettings settings = + EnvironmentSettings.newInstance().inStreamingMode().build(); + tableEnv = (TableEnvironmentImpl) StreamTableEnvironment.create(env, settings); + + context = new LineageContext(tableEnv); + } + + @Before + public void init() { + // create table ST + tableEnv.executeSql("DROP TABLE IF EXISTS ST"); + tableEnv.executeSql("CREATE TABLE ST ( " + " a STRING ," + + " b STRING ," + + " c STRING " + + ") WITH ( " + + " 'connector' = 'datagen' ," + + " 'rows-per-second' = '1' " + + ")"); + + // create table TT + tableEnv.executeSql("DROP TABLE IF EXISTS TT"); + tableEnv.executeSql("CREATE TABLE TT ( " + " A STRING ," + + " B STRING " + + ") WITH ( " + + " 'connector' = 'print' " + + ")"); + } + + @Test + public void testGetLineage() { + List actualList = context.getLineage("INSERT INTO TT select a||c A ,b||c B from ST"); + String[][] expectedArray = { + {"ST", "a", "TT", "A", "||(a, c)"}, + {"ST", "c", "TT", "A", "||(a, c)"}, + {"ST", "b", "TT", "B", "||(b, c)"}, + {"ST", "c", "TT", "B", "||(b, c)"} + }; + + List expectedList = buildResult(expectedArray); + assertEquals(expectedList, actualList); + } + + private List buildResult(String[][] expectedArray) { + return Stream.of(expectedArray) + .map(e -> { + String transform = e.length == 5 ? e[4] : null; + return new LineageRel( + "default_catalog", + "default_database", + e[0], + e[1], + "default_catalog", + "default_database", + e[2], + e[3], + transform); + }) + .collect(Collectors.toList()); + } +} diff --git a/dinky-client/dinky-client-1.16/pom.xml b/dinky-client/dinky-client-1.16/pom.xml index 770b7351cb..649b33bdec 100644 --- a/dinky-client/dinky-client-1.16/pom.xml +++ b/dinky-client/dinky-client-1.16/pom.xml @@ -41,6 +41,11 @@ com.sun.xml.bind jaxb-core + + junit + junit + provided + diff --git a/dinky-client/dinky-client-1.16/src/main/java/org/apache/calcite/rel/metadata/RelColumnOrigin.java b/dinky-client/dinky-client-1.16/src/main/java/org/apache/calcite/rel/metadata/RelColumnOrigin.java new file mode 100644 index 0000000000..5fc8dc24cb --- /dev/null +++ b/dinky-client/dinky-client-1.16/src/main/java/org/apache/calcite/rel/metadata/RelColumnOrigin.java @@ -0,0 +1,116 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.apache.calcite.rel.metadata; + +import org.apache.calcite.plan.RelOptTable; + +/** + * Modified based on calcite's source code org.apache.calcite.rel.metadata.RelColumnOrigin + *

          + * Modification point: + *

            + *
          1. add transform field and related code. + *
          + * + * @description: RelColumnOrigin is a data structure describing one of the origins of an + * output column produced by a relational expression. + * @author: HamaWhite + */ +public class RelColumnOrigin { + // ~ Instance fields -------------------------------------------------------- + + private final RelOptTable originTable; + + private final int iOriginColumn; + + private final boolean isDerived; + + /** + * Stores the expression for data conversion, + * which source table fields are transformed by which expression the target field + */ + private String transform; + + // ~ Constructors ----------------------------------------------------------- + + public RelColumnOrigin(RelOptTable originTable, int iOriginColumn, boolean isDerived) { + this.originTable = originTable; + this.iOriginColumn = iOriginColumn; + this.isDerived = isDerived; + } + + public RelColumnOrigin(RelOptTable originTable, int iOriginColumn, boolean isDerived, String transform) { + this.originTable = originTable; + this.iOriginColumn = iOriginColumn; + this.isDerived = isDerived; + this.transform = transform; + } + + // ~ Methods ---------------------------------------------------------------- + + /** + * Returns table of origin. + */ + public RelOptTable getOriginTable() { + return originTable; + } + + /** + * Returns the 0-based index of column in origin table; whether this ordinal + * is flattened or unflattened depends on whether UDT flattening has already + * been performed on the relational expression which produced this + * description. + */ + public int getOriginColumnOrdinal() { + return iOriginColumn; + } + + /** + * Consider the query select a+b as c, d as e from t. The + * output column c has two origins (a and b), both of them derived. The + * output column d as one origin (c), which is not derived. + * + * @return false if value taken directly from column in origin table; true + * otherwise + */ + public boolean isDerived() { + return isDerived; + } + + public String getTransform() { + return transform; + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof RelColumnOrigin)) { + return false; + } + RelColumnOrigin other = (RelColumnOrigin) obj; + return originTable.getQualifiedName().equals(other.originTable.getQualifiedName()) + && (iOriginColumn == other.iOriginColumn) + && (isDerived == other.isDerived); + } + + @Override + public int hashCode() { + return originTable.getQualifiedName().hashCode() + iOriginColumn + (isDerived ? 
313 : 0); + } +} diff --git a/dinky-client/dinky-client-1.16/src/main/java/org/apache/calcite/rel/metadata/RelMdColumnOrigins.java b/dinky-client/dinky-client-1.16/src/main/java/org/apache/calcite/rel/metadata/RelMdColumnOrigins.java index 6bdad4d186..5c8aae002a 100644 --- a/dinky-client/dinky-client-1.16/src/main/java/org/apache/calcite/rel/metadata/RelMdColumnOrigins.java +++ b/dinky-client/dinky-client-1.16/src/main/java/org/apache/calcite/rel/metadata/RelMdColumnOrigins.java @@ -36,7 +36,7 @@ import org.apache.calcite.rel.core.Sort; import org.apache.calcite.rel.core.TableFunctionScan; import org.apache.calcite.rel.core.TableModify; -import org.apache.calcite.rel.core.Window; +import org.apache.calcite.rel.core.TableScan; import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.rex.RexCall; import org.apache.calcite.rex.RexFieldAccess; @@ -48,33 +48,47 @@ import org.apache.calcite.rex.RexVisitor; import org.apache.calcite.rex.RexVisitorImpl; import org.apache.calcite.util.BuiltInMethod; -import org.apache.flink.table.planner.plan.schema.TableSourceTable; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; +import java.util.Map; import java.util.Set; +import java.util.function.Function; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Modified based on calcite's source code org.apache.calcite.rel.metadata.RelMdColumnOrigins * - *

          Modification point: 1. Support lookup join, add method getColumnOrigins(Snapshot - * rel,RelMetadataQuery mq, int iOutputColumn) 2. Support watermark, add method - * getColumnOrigins(SingleRel rel,RelMetadataQuery mq, int iOutputColumn) 3. Support table function, - * add method getColumnOrigins(Correlate rel, RelMetadataQuery mq, int iOutputColumn) 4. Support - * field AS LOCALTIMESTAMP, modify method getColumnOrigins(Calc rel, RelMetadataQuery mq, int - * iOutputColumn) 5. Support CEP, add method getColumnOrigins(Match rel, RelMetadataQuery mq, int - * iOutputColumn) 6. Support ROW_NUMBER(), add method getColumnOrigins(Window rel, RelMetadataQuery - * mq, int iOutputColumn)* + *

          Modification point: + *

            + *
          1. Support lookup join, add method getColumnOrigins(Snapshot rel, RelMetadataQuery mq, int iOutputColumn) + *
          2. Support watermark, add method getColumnOrigins(SingleRel rel, RelMetadataQuery mq, int iOutputColumn) + *
          3. Support table function, add method getColumnOrigins(Correlate rel, RelMetadataQuery mq, int iOutputColumn) + *
          4. Support CEP, add method getColumnOrigins(Match rel, RelMetadataQuery mq, int iOutputColumn) + *
          5. Support transform, add method createDerivedColumnOrigins(Set inputSet, String transform, boolean originTransform), and related code + *
          6. Support field AS LOCALTIMESTAMP, modify method getColumnOrigins(Project rel, RelMetadataQuery mq, int iOutputColumn) + *
          7. Support PROCTIME() as the first field, add method computeIndexWithOffset, used by getColumnOrigins(Project rel, RelMetadataQuery mq, int iOutputColumn) + *
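Point 7 above is index bookkeeping: when a projection over a TableScan contains zero-argument calls such as PROCTIME() before a plain column reference, the reference's index into the scan has to be shifted. The following is a small, dependency-free sketch of that counting rule only; the project list is just strings standing in for RexNodes, so it illustrates the arithmetic rather than the patch's Calcite code.

    import java.util.Arrays;
    import java.util.List;

    public class ProctimeOffsetSketch {

        // Stand-in for "RexCall with no operands" in the real code, e.g. PROCTIME().
        private static boolean isZeroArgCall(String project) {
            return project.endsWith("()");
        }

        // Add one to the base index for every zero-argument call that precedes
        // the projected output column.
        static int computeIndexWithOffset(List<String> projects, int baseIndex, int iOutputColumn) {
            int offset = 0;
            for (int i = 0; i < iOutputColumn; i++) {
                if (isZeroArgCall(projects.get(i))) {
                    offset++;
                }
            }
            return baseIndex + offset;
        }

        public static void main(String[] args) {
            // One zero-argument call precedes output column 2, so base index 1 resolves to 2.
            List<String> projects = Arrays.asList("PROCTIME()", "$0", "$1");
            System.out.println(computeIndexWithOffset(projects, 1, 2)); // prints 2
        }
    }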
              * - * @description: RelMdColumnOrigins supplies a default implementation of {@link - * RelMetadataQuery#getColumnOrigins} for the standard logical algebra. - * @version: 1.0.0 + * @description: RelMdColumnOrigins supplies a default implementation of {@link RelMetadataQuery#getColumnOrigins} for the standard logical algebra. + * @author: HamaWhite */ public class RelMdColumnOrigins implements MetadataHandler { + private static final Logger LOG = LoggerFactory.getLogger(RelMdColumnOrigins.class); + + public static final String DELIMITER = "."; + public static final RelMetadataProvider SOURCE = ReflectiveRelMetadataProvider.reflectiveSource( BuiltInMethod.COLUMN_ORIGIN.method, new RelMdColumnOrigins()); @@ -98,10 +112,10 @@ public Set getColumnOrigins(Aggregate rel, RelMetadataQuery mq, // Aggregate columns are derived from input columns AggregateCall call = rel.getAggCallList().get(iOutputColumn - rel.getGroupCount()); - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); for (Integer iInput : call.getArgList()) { Set inputSet = mq.getColumnOrigins(rel.getInput(), iInput); - inputSet = createDerivedColumnOrigins(inputSet); + inputSet = createDerivedColumnOrigins(inputSet, call.toString(), true); if (inputSet != null) { set.addAll(inputSet); } @@ -132,7 +146,9 @@ public Set getColumnOrigins(Join rel, RelMetadataQuery mq, int return set; } - /** Support the field blood relationship of table function */ + /** + * Support the field blood relationship of table function + */ public Set getColumnOrigins(Correlate rel, RelMetadataQuery mq, int iOutputColumn) { List leftFieldList = rel.getLeft().getRowType().getFieldList(); @@ -142,68 +158,126 @@ public Set getColumnOrigins(Correlate rel, RelMetadataQuery mq, if (iOutputColumn < nLeftColumns) { set = mq.getColumnOrigins(rel.getLeft(), iOutputColumn); } else { - // get the field name of the left table configured in the Table Function on the right - TableFunctionScan tableFunctionScan = (TableFunctionScan) rel.getRight(); - RexCall rexCall = (RexCall) tableFunctionScan.getCall(); - // support only one field in table function - RexFieldAccess rexFieldAccess = (RexFieldAccess) rexCall.operands.get(0); - String fieldName = rexFieldAccess.getField().getName(); - - int leftFieldIndex = 0; - for (int i = 0; i < nLeftColumns; i++) { - if (leftFieldList.get(i).getName().equalsIgnoreCase(fieldName)) { - leftFieldIndex = i; - break; + if (rel.getRight() instanceof TableFunctionScan) { + // get the field name of the left table configured in the Table Function on the right + TableFunctionScan tableFunctionScan = (TableFunctionScan) rel.getRight(); + RexCall rexCall = (RexCall) tableFunctionScan.getCall(); + // support only one field in table function + RexFieldAccess rexFieldAccess = + (RexFieldAccess) rexCall.getOperands().get(0); + String fieldName = rexFieldAccess.getField().getName(); + + int leftFieldIndex = 0; + for (int i = 0; i < nLeftColumns; i++) { + if (leftFieldList.get(i).getName().equalsIgnoreCase(fieldName)) { + leftFieldIndex = i; + break; + } } + /** + * Get the fields from the left table, don't go to + * getColumnOrigins(TableFunctionScan rel,RelMetadataQuery mq, int iOutputColumn), + * otherwise the return is null, and the UDTF field origin cannot be parsed + */ + set = mq.getColumnOrigins(rel.getLeft(), leftFieldIndex); + + // process transform for udtf + String transform = rexCall.toString().replace(rexFieldAccess.toString(), fieldName) + + DELIMITER + + 
tableFunctionScan.getRowType().getFieldNames().get(iOutputColumn - nLeftColumns); + set = createDerivedColumnOrigins(set, transform, false); + } else { + set = mq.getColumnOrigins(rel.getRight(), iOutputColumn - nLeftColumns); } - /** - * Get the fields from the left table, don't go to getColumnOrigins(TableFunctionScan - * rel,RelMetadataQuery mq, int iOutputColumn), otherwise the return is null, and the - * UDTF field origin cannot be parsed - */ - set = mq.getColumnOrigins(rel.getLeft(), leftFieldIndex); } return set; } public Set getColumnOrigins(SetOp rel, RelMetadataQuery mq, int iOutputColumn) { - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); for (RelNode input : rel.getInputs()) { Set inputSet = mq.getColumnOrigins(input, iOutputColumn); if (inputSet == null) { - return null; + return Collections.emptySet(); } set.addAll(inputSet); } return set; } - /** Support the field blood relationship of lookup join */ + /** + * Support the field blood relationship of lookup join + */ public Set getColumnOrigins(Snapshot rel, RelMetadataQuery mq, int iOutputColumn) { return mq.getColumnOrigins(rel.getInput(), iOutputColumn); } - /** Support the field blood relationship of watermark */ + /** + * Support the field blood relationship of watermark + */ public Set getColumnOrigins(SingleRel rel, RelMetadataQuery mq, int iOutputColumn) { return mq.getColumnOrigins(rel.getInput(), iOutputColumn); } /** - * Support field blood relationship of CEP. The first column is the field after PARTITION BY, - * and the other columns come from the measures in Match + * Support for new fields in the source table similar to those created with the LOCALTIMESTAMP function + */ + public Set getColumnOrigins(Project rel, final RelMetadataQuery mq, int iOutputColumn) { + final RelNode input = rel.getInput(); + RexNode rexNode = rel.getProjects().get(iOutputColumn); + + if (rexNode instanceof RexInputRef) { + // Direct reference: no derivation added. + RexInputRef inputRef = (RexInputRef) rexNode; + int index = inputRef.getIndex(); + if (input instanceof TableScan) { + index = computeIndexWithOffset(rel.getProjects(), inputRef.getIndex(), iOutputColumn); + } + return mq.getColumnOrigins(input, index); + } else if (input instanceof TableScan + && rexNode.getClass().equals(RexCall.class) + && ((RexCall) rexNode).getOperands().isEmpty()) { + return mq.getColumnOrigins(input, iOutputColumn); + } + // Anything else is a derivation, possibly from multiple columns. + final Set set = getMultipleColumns(rexNode, input, mq); + return createDerivedColumnOrigins(set, rexNode.toString(), true); + } + + private int computeIndexWithOffset(List projects, int baseIndex, int iOutputColumn) { + int offset = 0; + for (int index = 0; index < iOutputColumn; index++) { + RexNode rexNode = projects.get(index); + if ((rexNode.getClass().equals(RexCall.class) + && ((RexCall) rexNode).getOperands().isEmpty())) { + offset += 1; + } + } + return baseIndex + offset; + } + + /** + * Support field blood relationship of CEP. 
+ * The first column is the field after PARTITION BY, and the other columns come from the measures in Match */ public Set getColumnOrigins(Match rel, RelMetadataQuery mq, int iOutputColumn) { - if (iOutputColumn == 0) { + int orderCount = rel.getOrderKeys().getKeys().size(); + + if (iOutputColumn < orderCount) { return mq.getColumnOrigins(rel.getInput(), iOutputColumn); } final RelNode input = rel.getInput(); - RexNode rexNode = rel.getMeasures().values().asList().get(iOutputColumn - 1); + RexNode rexNode = rel.getMeasures().values().asList().get(iOutputColumn - orderCount); RexPatternFieldRef rexPatternFieldRef = searchRexPatternFieldRef(rexNode); if (rexPatternFieldRef != null) { - return mq.getColumnOrigins(input, rexPatternFieldRef.getIndex()); + final Set set = mq.getColumnOrigins(input, rexPatternFieldRef.getIndex()); + String originTransform = rexNode instanceof RexCall + ? ((RexCall) rexNode).getOperands().get(0).toString() + : null; + return createDerivedColumnOrigins(set, originTransform, true); } - return null; + return Collections.emptySet(); } private RexPatternFieldRef searchRexPatternFieldRef(RexNode rexNode) { @@ -219,46 +293,6 @@ private RexPatternFieldRef searchRexPatternFieldRef(RexNode rexNode) { return null; } - /** Support the field blood relationship of ROW_NUMBER() */ - public Set getColumnOrigins(Window rel, RelMetadataQuery mq, int iOutputColumn) { - final RelNode input = rel.getInput(); - /** - * Haven't found a good way to judge whether the field comes from window, for the time - * being, first judge by parsing the string - */ - String fieldName = rel.getRowType().getFieldNames().get(iOutputColumn); - // for example: "w1$o0" - if (fieldName.startsWith("w") && fieldName.contains("$")) { - int groupIndex = Integer.parseInt(fieldName.substring(1, fieldName.indexOf("$"))); - final Set set = new LinkedHashSet<>(); - if (!rel.groups.isEmpty()) { - Window.Group group = rel.groups.get(groupIndex); - // process partition by keys - group.keys.asList().forEach(index -> set.addAll(mq.getColumnOrigins(input, index))); - // process order by keys - group.orderKeys - .getFieldCollations() - .forEach(e -> set.addAll(mq.getColumnOrigins(input, e.getFieldIndex()))); - } - return set; - } - return mq.getColumnOrigins(rel.getInput(), iOutputColumn); - } - - public Set getColumnOrigins(Project rel, final RelMetadataQuery mq, int iOutputColumn) { - final RelNode input = rel.getInput(); - RexNode rexNode = rel.getProjects().get(iOutputColumn); - - if (rexNode instanceof RexInputRef) { - // Direct reference: no derivation added. - RexInputRef inputRef = (RexInputRef) rexNode; - return mq.getColumnOrigins(input, inputRef.getIndex()); - } - // Anything else is a derivation, possibly from multiple columns. - final Set set = getMultipleColumns(rexNode, input, mq); - return createDerivedColumnOrigins(set); - } - public Set getColumnOrigins(Calc rel, final RelMetadataQuery mq, int iOutputColumn) { final RelNode input = rel.getInput(); final RexShuttle rexShuttle = new RexShuttle() { @@ -277,30 +311,6 @@ public RexNode visitLocalRef(RexLocalRef localRef) { // Direct reference: no derivation added. 
RexInputRef inputRef = (RexInputRef) rexNode; return mq.getColumnOrigins(input, inputRef.getIndex()); - } else if (rexNode instanceof RexCall && ((RexCall) rexNode).operands.isEmpty()) { - // support for new fields in the source table similar to those created with the - // LOCALTIMESTAMP function - TableSourceTable table = ((TableSourceTable) rel.getInput().getTable()); - if (table != null) { - String targetFieldName = rel.getProgram() - .getOutputRowType() - .getFieldList() - .get(iOutputColumn) - .getName(); - List fieldList = - table.contextResolvedTable().getResolvedSchema().getColumnNames(); - - int index = -1; - for (int i = 0; i < fieldList.size(); i++) { - if (fieldList.get(i).equalsIgnoreCase(targetFieldName)) { - index = i; - break; - } - } - if (index != -1) { - return Collections.singleton(new RelColumnOrigin(table, index, false)); - } - } } // Anything else is a derivation, possibly from multiple columns. final Set set = getMultipleColumns(rexNode, input, mq); @@ -324,14 +334,14 @@ public Set getColumnOrigins(Exchange rel, RelMetadataQuery mq, } public Set getColumnOrigins(TableFunctionScan rel, RelMetadataQuery mq, int iOutputColumn) { - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); Set mappings = rel.getColumnMappings(); if (mappings == null) { - if (rel.getInputs().size() > 0) { + if (!rel.getInputs().isEmpty()) { // This is a non-leaf transformation: say we don't // know about origins, because there are probably // columns below. - return null; + return Collections.emptySet(); } else { // This is a leaf transformation: say there are fer sure no // column origins. @@ -346,7 +356,7 @@ public Set getColumnOrigins(TableFunctionScan rel, RelMetadataQ final int column = mapping.iInputColumn; Set origins = mq.getColumnOrigins(input, column); if (origins == null) { - return null; + return Collections.emptySet(); } if (mapping.derived) { origins = createDerivedColumnOrigins(origins); @@ -357,18 +367,19 @@ public Set getColumnOrigins(TableFunctionScan rel, RelMetadataQ } // Catch-all rule when none of the others apply. + @SuppressWarnings("squid:S1172") public Set getColumnOrigins(RelNode rel, RelMetadataQuery mq, int iOutputColumn) { // NOTE jvs 28-Mar-2006: We may get this wrong for a physical table // expression which supports projections. In that case, // it's up to the plugin writer to override with the // correct information. - if (rel.getInputs().size() > 0) { + if (!rel.getInputs().isEmpty()) { // No generic logic available for non-leaf rels. - return null; + return Collections.emptySet(); } - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); RelOptTable table = rel.getTable(); if (table == null) { @@ -383,7 +394,7 @@ public Set getColumnOrigins(RelNode rel, RelMetadataQuery mq, i // names.) This detection assumes the table expression doesn't handle // rename as well. 
if (table.getRowType() != rel.getRowType()) { - return null; + return Collections.emptySet(); } set.add(new RelColumnOrigin(table, iOutputColumn, false)); @@ -392,9 +403,9 @@ public Set getColumnOrigins(RelNode rel, RelMetadataQuery mq, i private Set createDerivedColumnOrigins(Set inputSet) { if (inputSet == null) { - return null; + return Collections.emptySet(); } - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); for (RelColumnOrigin rco : inputSet) { RelColumnOrigin derived = new RelColumnOrigin(rco.getOriginTable(), rco.getOriginColumnOrdinal(), true); set.add(derived); @@ -402,10 +413,113 @@ private Set createDerivedColumnOrigins(Set inp return set; } + private Set createDerivedColumnOrigins( + Set inputSet, String transform, boolean originTransform) { + if (inputSet == null || inputSet.isEmpty()) { + return Collections.emptySet(); + } + final Set set = new LinkedHashSet<>(); + + String finalTransform = originTransform ? computeTransform(inputSet, transform) : transform; + for (RelColumnOrigin rco : inputSet) { + RelColumnOrigin derived = + new RelColumnOrigin(rco.getOriginTable(), rco.getOriginColumnOrdinal(), true, finalTransform); + set.add(derived); + } + return set; + } + + /** + * Replace the variable at the beginning of $ in input with the real field information + */ + private String computeTransform(Set inputSet, String transform) { + LOG.debug("origin transform: {}", transform); + Pattern pattern = Pattern.compile("\\$\\d+"); + Matcher matcher = pattern.matcher(transform); + + Set operandSet = new LinkedHashSet<>(); + while (matcher.find()) { + operandSet.add(matcher.group()); + } + + if (operandSet.isEmpty()) { + LOG.info("operandSet is empty"); + return null; + } + if (inputSet.size() != operandSet.size()) { + LOG.warn( + "The number [{}] of fields in the source tables are not equal to operands [{}]", + inputSet.size(), + operandSet.size()); + return null; + } + + Map sourceColumnMap = new HashMap<>(); + Iterator iterator = optimizeSourceColumnSet(inputSet).iterator(); + operandSet.forEach(e -> sourceColumnMap.put(e, iterator.next())); + LOG.debug("sourceColumnMap: {}", sourceColumnMap); + + matcher = pattern.matcher(transform); + String temp; + while (matcher.find()) { + temp = matcher.group(); + transform = transform.replace(temp, sourceColumnMap.get(temp)); + } + + // temporary special treatment + transform = transform.replace("_UTF-16LE", ""); + LOG.debug("transform: {}", transform); + return transform; + } + + /** + * Increase the readability of transform. + * if catalog, database and table are the same, return field. + * If the catalog and database are the same, return the table and field. + * If the catalog is the same, return the database, table, field. + * Otherwise, return all + */ + private Set optimizeSourceColumnSet(Set inputSet) { + Set catalogSet = new HashSet<>(); + Set databaseSet = new HashSet<>(); + Set tableSet = new HashSet<>(); + Set> qualifiedSet = new LinkedHashSet<>(); + for (RelColumnOrigin rco : inputSet) { + RelOptTable originTable = rco.getOriginTable(); + List qualifiedName = originTable.getQualifiedName(); + + // catalog,database,table,field + List qualifiedList = new ArrayList<>(qualifiedName); + catalogSet.add(qualifiedName.get(0)); + databaseSet.add(qualifiedName.get(1)); + tableSet.add(qualifiedName.get(2)); + + String field = rco.getTransform() != null + ? 
rco.getTransform() + : originTable.getRowType().getFieldNames().get(rco.getOriginColumnOrdinal()); + qualifiedList.add(field); + qualifiedSet.add(qualifiedList); + } + if (catalogSet.size() == 1 && databaseSet.size() == 1 && tableSet.size() == 1) { + return optimizeName(qualifiedSet, e -> e.get(3)); + } else if (catalogSet.size() == 1 && databaseSet.size() == 1) { + return optimizeName(qualifiedSet, e -> String.join(DELIMITER, e.subList(2, 4))); + } else if (catalogSet.size() == 1) { + return optimizeName(qualifiedSet, e -> String.join(DELIMITER, e.subList(1, 4))); + } else { + return optimizeName(qualifiedSet, e -> String.join(DELIMITER, e)); + } + } + + private Set optimizeName(Set> qualifiedSet, Function, String> mapper) { + return qualifiedSet.stream().map(mapper).collect(Collectors.toSet()); + } + private Set getMultipleColumns(RexNode rexNode, RelNode input, final RelMetadataQuery mq) { - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); final RexVisitor visitor = new RexVisitorImpl(true) { + @Override public Void visitInputRef(RexInputRef inputRef) { Set inputSet = mq.getColumnOrigins(input, inputRef.getIndex()); if (inputSet != null) { diff --git a/dinky-client/dinky-client-1.16/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java b/dinky-client/dinky-client-1.16/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java index 0288e906fc..d8547b223d 100644 --- a/dinky-client/dinky-client-1.16/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java +++ b/dinky-client/dinky-client-1.16/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java @@ -23,7 +23,6 @@ import org.dinky.context.DinkyClassLoaderContextHolder; import org.dinky.data.model.LineageRel; import org.dinky.data.result.SqlExplainResult; -import org.dinky.utils.FlinkStreamProgramWithoutPhysical; import org.dinky.utils.LineageContext; import org.apache.flink.api.dag.Transformation; @@ -50,7 +49,6 @@ import org.apache.flink.table.operations.command.ResetOperation; import org.apache.flink.table.operations.command.SetOperation; import org.apache.flink.table.operations.ddl.CreateTableOperation; -import org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram; import org.apache.flink.types.Row; import java.util.ArrayList; @@ -78,13 +76,10 @@ public class CustomTableEnvironmentImpl extends AbstractCustomTableEnvironment { private static final Logger log = LoggerFactory.getLogger(CustomTableEnvironmentImpl.class); - private final FlinkChainedProgram flinkChainedProgram; private static final ObjectMapper mapper = new ObjectMapper(); public CustomTableEnvironmentImpl(StreamTableEnvironment streamTableEnvironment) { super(streamTableEnvironment); - this.flinkChainedProgram = FlinkStreamProgramWithoutPhysical.buildProgram( - (Configuration) getStreamExecutionEnvironment().getConfiguration()); } public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment) { @@ -260,8 +255,7 @@ private void setConfiguration(StreamExecutionEnvironment environment, Map getLineage(String statement) { - LineageContext lineageContext = - new LineageContext(flinkChainedProgram, (TableEnvironmentImpl) streamTableEnvironment); + LineageContext lineageContext = new LineageContext((TableEnvironmentImpl) streamTableEnvironment); return lineageContext.getLineage(statement); } diff --git a/dinky-client/dinky-client-1.16/src/main/java/org/dinky/utils/FlinkStreamProgramWithoutPhysical.java 
b/dinky-client/dinky-client-1.16/src/main/java/org/dinky/utils/FlinkStreamProgramWithoutPhysical.java deleted file mode 100644 index bf4808939d..0000000000 --- a/dinky-client/dinky-client-1.16/src/main/java/org/dinky/utils/FlinkStreamProgramWithoutPhysical.java +++ /dev/null @@ -1,246 +0,0 @@ -/* - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package org.dinky.utils; - -import org.apache.calcite.plan.Convention; -import org.apache.calcite.plan.hep.HepMatchOrder; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.table.api.config.OptimizerConfigOptions; -import org.apache.flink.table.planner.plan.nodes.FlinkConventions; -import org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram; -import org.apache.flink.table.planner.plan.optimize.program.FlinkDecorrelateProgram; -import org.apache.flink.table.planner.plan.optimize.program.FlinkGroupProgramBuilder; -import org.apache.flink.table.planner.plan.optimize.program.FlinkHepRuleSetProgramBuilder; -import org.apache.flink.table.planner.plan.optimize.program.FlinkVolcanoProgramBuilder; -import org.apache.flink.table.planner.plan.optimize.program.HEP_RULES_EXECUTION_TYPE; -import org.apache.flink.table.planner.plan.rules.FlinkStreamRuleSets; - -/** - * FlinkStreamProgramWithoutPhysical - * - * @since 2022/11/22 - */ -public class FlinkStreamProgramWithoutPhysical { - - private static final String SUBQUERY_REWRITE = "subquery_rewrite"; - private static final String TEMPORAL_JOIN_REWRITE = "temporal_join_rewrite"; - private static final String DECORRELATE = "decorrelate"; - private static final String DEFAULT_REWRITE = "default_rewrite"; - private static final String PREDICATE_PUSHDOWN = "predicate_pushdown"; - private static final String JOIN_REORDER = "join_reorder"; - private static final String PROJECT_REWRITE = "project_rewrite"; - private static final String LOGICAL = "logical"; - private static final String LOGICAL_REWRITE = "logical_rewrite"; - - public static FlinkChainedProgram buildProgram(Configuration config) { - FlinkChainedProgram chainedProgram = new FlinkChainedProgram(); - - // rewrite sub-queries to joins - chainedProgram.addLast( - SUBQUERY_REWRITE, - FlinkGroupProgramBuilder.newBuilder() - // rewrite QueryOperationCatalogViewTable before rewriting sub-queries - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.TABLE_REF_RULES()) - .build(), - "convert table references before rewriting sub-queries to" + " semi-join") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - 
.setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.SEMI_JOIN_RULES()) - .build(), - "rewrite sub-queries to semi-join") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_COLLECTION()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.TABLE_SUBQUERY_RULES()) - .build(), - "sub-queries remove") - // convert RelOptTableImpl (which exists in SubQuery before) to - // FlinkRelOptTable - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.TABLE_REF_RULES()) - .build(), - "convert table references after sub-queries removed") - .build()); - - // rewrite special temporal join plan - chainedProgram.addLast( - TEMPORAL_JOIN_REWRITE, - FlinkGroupProgramBuilder.newBuilder() - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.EXPAND_PLAN_RULES()) - .build(), - "convert correlate to temporal table join") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.POST_EXPAND_CLEAN_UP_RULES()) - .build(), - "convert enumerable table scan") - .build()); - - // query decorrelation - chainedProgram.addLast( - DECORRELATE, - FlinkGroupProgramBuilder.newBuilder() - // rewrite before decorrelation - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.PRE_DECORRELATION_RULES()) - .build(), - "pre-rewrite before decorrelation") - .addProgram(new FlinkDecorrelateProgram(), "") - .build()); - - // default rewrite, includes: predicate simplification, expression reduction, window - // properties rewrite, etc. 
- chainedProgram.addLast( - DEFAULT_REWRITE, - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.DEFAULT_REWRITE_RULES()) - .build()); - - // rule based optimization: push down predicate(s) in where clause, so it only needs to read - // the required data - chainedProgram.addLast( - PREDICATE_PUSHDOWN, - FlinkGroupProgramBuilder.newBuilder() - .addProgram( - FlinkGroupProgramBuilder.newBuilder() - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType( - HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.JOIN_PREDICATE_REWRITE_RULES()) - .build(), - "join predicate rewrite") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType( - HEP_RULES_EXECUTION_TYPE.RULE_COLLECTION()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.FILTER_PREPARE_RULES()) - .build(), - "filter rules") - .setIterations(5) - .build(), - "predicate rewrite") - .addProgram( - // PUSH_PARTITION_DOWN_RULES should always be in front of - // PUSH_FILTER_DOWN_RULES - // to prevent PUSH_FILTER_DOWN_RULES from consuming the predicates - // in partitions - FlinkGroupProgramBuilder.newBuilder() - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType( - HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.PUSH_PARTITION_DOWN_RULES()) - .build(), - "push down partitions into table scan") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType( - HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.PUSH_FILTER_DOWN_RULES()) - .build(), - "push down filters into table scan") - .build(), - "push predicate into table scan") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.PRUNE_EMPTY_RULES()) - .build(), - "prune empty after predicate push down") - .build()); - - // join reorder - if (config.getBoolean(OptimizerConfigOptions.TABLE_OPTIMIZER_JOIN_REORDER_ENABLED)) { - chainedProgram.addLast( - JOIN_REORDER, - FlinkGroupProgramBuilder.newBuilder() - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_COLLECTION()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.JOIN_REORDER_PREPARE_RULES()) - .build(), - "merge join into MultiJoin") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.JOIN_REORDER_RULES()) - .build(), - "do join reorder") - .build()); - } - - // project rewrite - chainedProgram.addLast( - PROJECT_REWRITE, - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_COLLECTION()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.PROJECT_RULES()) - .build()); - - // optimize the logical plan - chainedProgram.addLast( - LOGICAL, - FlinkVolcanoProgramBuilder.newBuilder() - .add(FlinkStreamRuleSets.LOGICAL_OPT_RULES()) - .setRequiredOutputTraits(new Convention.Impl[] {FlinkConventions.LOGICAL()}) - .build()); - - // 
logical rewrite - chainedProgram.addLast( - LOGICAL_REWRITE, - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.LOGICAL_REWRITE()) - .build()); - - return chainedProgram; - } -} diff --git a/dinky-client/dinky-client-1.16/src/main/java/org/dinky/utils/LineageContext.java b/dinky-client/dinky-client-1.16/src/main/java/org/dinky/utils/LineageContext.java index 2c31236b97..d707ade42a 100644 --- a/dinky-client/dinky-client-1.16/src/main/java/org/dinky/utils/LineageContext.java +++ b/dinky-client/dinky-client-1.16/src/main/java/org/dinky/utils/LineageContext.java @@ -27,23 +27,13 @@ import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.commons.collections.CollectionUtils; import org.apache.flink.api.java.tuple.Tuple2; -import org.apache.flink.table.api.TableConfig; import org.apache.flink.table.api.TableException; import org.apache.flink.table.api.ValidationException; import org.apache.flink.table.api.internal.TableEnvironmentImpl; -import org.apache.flink.table.catalog.CatalogManager; -import org.apache.flink.table.catalog.FunctionCatalog; -import org.apache.flink.table.module.ModuleManager; import org.apache.flink.table.operations.Operation; import org.apache.flink.table.operations.SinkModifyOperation; -import org.apache.flink.table.planner.calcite.FlinkRelBuilder; -import org.apache.flink.table.planner.calcite.RexFactory; -import org.apache.flink.table.planner.delegation.PlannerBase; import org.apache.flink.table.planner.operations.PlannerQueryOperation; -import org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram; -import org.apache.flink.table.planner.plan.optimize.program.StreamOptimizeContext; import org.apache.flink.table.planner.plan.schema.TableSourceTable; -import org.apache.flink.table.planner.plan.trait.MiniBatchInterval; import java.util.ArrayList; import java.util.List; @@ -56,11 +46,9 @@ */ public class LineageContext { - private final FlinkChainedProgram flinkChainedProgram; private final TableEnvironmentImpl tableEnv; - public LineageContext(FlinkChainedProgram flinkChainedProgram, TableEnvironmentImpl tableEnv) { - this.flinkChainedProgram = flinkChainedProgram; + public LineageContext(TableEnvironmentImpl tableEnv) { this.tableEnv = tableEnv; } @@ -70,11 +58,8 @@ public List getLineage(String statement) { String sinkTable = parsed.getField(0); RelNode oriRelNode = parsed.getField(1); - // 2. Optimize original relNode to generate Optimized Logical Plan - RelNode optRelNode = optimize(oriRelNode); - - // 3. Build lineage based from RelMetadataQuery - return buildFiledLineageResult(sinkTable, optRelNode); + // 2. Build lineage based from RelMetadataQuery + return buildFiledLineageResult(sinkTable, oriRelNode); } private Tuple2 parseStatement(String sql) { @@ -96,71 +81,6 @@ private Tuple2 parseStatement(String sql) { } } - /** Calling each program's optimize method in sequence. 
*/ - private RelNode optimize(RelNode relNode) { - return flinkChainedProgram.optimize(relNode, new StreamOptimizeContext() { - - @Override - public boolean isBatchMode() { - return false; - } - - @Override - public TableConfig getTableConfig() { - return tableEnv.getConfig(); - } - - @Override - public FunctionCatalog getFunctionCatalog() { - return getPlanner().getFlinkContext().getFunctionCatalog(); - } - - @Override - public CatalogManager getCatalogManager() { - return tableEnv.getCatalogManager(); - } - - @Override - public ModuleManager getModuleManager() { - return getPlanner().getFlinkContext().getModuleManager(); - } - - @Override - public RexFactory getRexFactory() { - return getPlanner().getFlinkContext().getRexFactory(); - } - - @Override - public FlinkRelBuilder getFlinkRelBuilder() { - return getPlanner().createRelBuilder(); - } - - @Override - public boolean isUpdateBeforeRequired() { - return false; - } - - @Override - public MiniBatchInterval getMiniBatchInterval() { - return MiniBatchInterval.NONE; - } - - @Override - public boolean needFinalTimeIndicatorConversion() { - return true; - } - - @Override - public ClassLoader getClassLoader() { - return getPlanner().getFlinkContext().getClassLoader(); - } - - private PlannerBase getPlanner() { - return (PlannerBase) tableEnv.getPlanner(); - } - }); - } - /** Check the size of query and sink fields match */ private void validateSchema(String sinkTable, RelNode relNode, List sinkFieldList) { List queryFieldList = relNode.getRowType().getFieldNames(); @@ -204,7 +124,8 @@ private List buildFiledLineageResult(String sinkTable, RelNode optRe String sourceColumn = fieldNames.get(ordinal); // add record - resultList.add(LineageRel.build(sourceTable, sourceColumn, sinkTable, targetColumn)); + resultList.add(LineageRel.build( + sourceTable, sourceColumn, sinkTable, targetColumn, relColumnOrigin.getTransform())); } } } diff --git a/dinky-client/dinky-client-1.16/src/test/java/org/dinky/utils/LineageContextTest.java b/dinky-client/dinky-client-1.16/src/test/java/org/dinky/utils/LineageContextTest.java new file mode 100644 index 0000000000..fa22d6eec6 --- /dev/null +++ b/dinky-client/dinky-client-1.16/src/test/java/org/dinky/utils/LineageContextTest.java @@ -0,0 +1,112 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
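The LineageContext hunk above carries the main behavioural change of this patch: the optimize() step and its StreamOptimizeContext are removed, and lineage is derived by running RelMetadataQuery over the original RelNode, with the transform expression propagated into each LineageRel. A minimal usage sketch, assuming a StreamTableEnvironment in which the ST(a, b, c) and TT(A, B) tables are registered exactly as in the new LineageContextTest (driver code only, not part of the patch):

    // tableEnv is assumed to be the StreamTableEnvironment set up as in LineageContextTest.
    LineageContext context = new LineageContext((TableEnvironmentImpl) tableEnv);
    List<LineageRel> lineage = context.getLineage("INSERT INTO TT select a||c A, b||c B from ST");
    // Four records are expected: ST.a -> TT.A, ST.c -> TT.A, ST.b -> TT.B and ST.c -> TT.B,
    // each carrying its transform expression, e.g. ||(a, c) for TT.A.
    lineage.forEach(System.out::println);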
+ * + */ + +package org.dinky.utils; + +import static org.junit.Assert.assertEquals; + +import org.dinky.data.model.LineageRel; + +import org.apache.flink.configuration.Configuration; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.table.api.EnvironmentSettings; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; +import org.apache.flink.table.api.internal.TableEnvironmentImpl; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +/** + * @description: LineageContextTest + * @author: HamaWhite + */ +public class LineageContextTest { + + private static TableEnvironmentImpl tableEnv; + private static LineageContext context; + + @BeforeClass + public static void setUp() { + StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment(new Configuration()); + + EnvironmentSettings settings = + EnvironmentSettings.newInstance().inStreamingMode().build(); + tableEnv = (TableEnvironmentImpl) StreamTableEnvironment.create(env, settings); + + context = new LineageContext(tableEnv); + } + + @Before + public void init() { + // create table ST + tableEnv.executeSql("DROP TABLE IF EXISTS ST"); + tableEnv.executeSql("CREATE TABLE ST ( " + " a STRING ," + + " b STRING ," + + " c STRING " + + ") WITH ( " + + " 'connector' = 'datagen' ," + + " 'rows-per-second' = '1' " + + ")"); + + // create table TT + tableEnv.executeSql("DROP TABLE IF EXISTS TT"); + tableEnv.executeSql("CREATE TABLE TT ( " + " A STRING ," + + " B STRING " + + ") WITH ( " + + " 'connector' = 'print' " + + ")"); + } + + @Test + public void testGetLineage() { + List actualList = context.getLineage("INSERT INTO TT select a||c A ,b||c B from ST"); + String[][] expectedArray = { + {"ST", "a", "TT", "A", "||(a, c)"}, + {"ST", "c", "TT", "A", "||(a, c)"}, + {"ST", "b", "TT", "B", "||(b, c)"}, + {"ST", "c", "TT", "B", "||(b, c)"} + }; + + List expectedList = buildResult(expectedArray); + assertEquals(expectedList, actualList); + } + + private List buildResult(String[][] expectedArray) { + return Stream.of(expectedArray) + .map(e -> { + String transform = e.length == 5 ? e[4] : null; + return new LineageRel( + "default_catalog", + "default_database", + e[0], + e[1], + "default_catalog", + "default_database", + e[2], + e[3], + transform); + }) + .collect(Collectors.toList()); + } +} diff --git a/dinky-client/dinky-client-1.17/pom.xml b/dinky-client/dinky-client-1.17/pom.xml index 24b38560cc..7e38ca2688 100644 --- a/dinky-client/dinky-client-1.17/pom.xml +++ b/dinky-client/dinky-client-1.17/pom.xml @@ -41,6 +41,11 @@ com.sun.xml.bind jaxb-core + + junit + junit + provided + diff --git a/dinky-client/dinky-client-1.17/src/main/java/org/apache/calcite/rel/metadata/RelColumnOrigin.java b/dinky-client/dinky-client-1.17/src/main/java/org/apache/calcite/rel/metadata/RelColumnOrigin.java new file mode 100644 index 0000000000..5fc8dc24cb --- /dev/null +++ b/dinky-client/dinky-client-1.17/src/main/java/org/apache/calcite/rel/metadata/RelColumnOrigin.java @@ -0,0 +1,116 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.apache.calcite.rel.metadata; + +import org.apache.calcite.plan.RelOptTable; + +/** + * Modified based on calcite's source code org.apache.calcite.rel.metadata.RelColumnOrigin + *

+ * Modification point:
+ *   1. add transform field and related code.
              + * + * @description: RelColumnOrigin is a data structure describing one of the origins of an + * output column produced by a relational expression. + * @author: HamaWhite + */ +public class RelColumnOrigin { + // ~ Instance fields -------------------------------------------------------- + + private final RelOptTable originTable; + + private final int iOriginColumn; + + private final boolean isDerived; + + /** + * Stores the expression for data conversion, + * which source table fields are transformed by which expression the target field + */ + private String transform; + + // ~ Constructors ----------------------------------------------------------- + + public RelColumnOrigin(RelOptTable originTable, int iOriginColumn, boolean isDerived) { + this.originTable = originTable; + this.iOriginColumn = iOriginColumn; + this.isDerived = isDerived; + } + + public RelColumnOrigin(RelOptTable originTable, int iOriginColumn, boolean isDerived, String transform) { + this.originTable = originTable; + this.iOriginColumn = iOriginColumn; + this.isDerived = isDerived; + this.transform = transform; + } + + // ~ Methods ---------------------------------------------------------------- + + /** + * Returns table of origin. + */ + public RelOptTable getOriginTable() { + return originTable; + } + + /** + * Returns the 0-based index of column in origin table; whether this ordinal + * is flattened or unflattened depends on whether UDT flattening has already + * been performed on the relational expression which produced this + * description. + */ + public int getOriginColumnOrdinal() { + return iOriginColumn; + } + + /** + * Consider the query select a+b as c, d as e from t. The + * output column c has two origins (a and b), both of them derived. The + * output column d as one origin (c), which is not derived. + * + * @return false if value taken directly from column in origin table; true + * otherwise + */ + public boolean isDerived() { + return isDerived; + } + + public String getTransform() { + return transform; + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof RelColumnOrigin)) { + return false; + } + RelColumnOrigin other = (RelColumnOrigin) obj; + return originTable.getQualifiedName().equals(other.originTable.getQualifiedName()) + && (iOriginColumn == other.iOriginColumn) + && (isDerived == other.isDerived); + } + + @Override + public int hashCode() { + return originTable.getQualifiedName().hashCode() + iOriginColumn + (isDerived ? 
313 : 0); + } +} diff --git a/dinky-client/dinky-client-1.17/src/main/java/org/apache/calcite/rel/metadata/RelMdColumnOrigins.java b/dinky-client/dinky-client-1.17/src/main/java/org/apache/calcite/rel/metadata/RelMdColumnOrigins.java index 6bdad4d186..5c8aae002a 100644 --- a/dinky-client/dinky-client-1.17/src/main/java/org/apache/calcite/rel/metadata/RelMdColumnOrigins.java +++ b/dinky-client/dinky-client-1.17/src/main/java/org/apache/calcite/rel/metadata/RelMdColumnOrigins.java @@ -36,7 +36,7 @@ import org.apache.calcite.rel.core.Sort; import org.apache.calcite.rel.core.TableFunctionScan; import org.apache.calcite.rel.core.TableModify; -import org.apache.calcite.rel.core.Window; +import org.apache.calcite.rel.core.TableScan; import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.rex.RexCall; import org.apache.calcite.rex.RexFieldAccess; @@ -48,33 +48,47 @@ import org.apache.calcite.rex.RexVisitor; import org.apache.calcite.rex.RexVisitorImpl; import org.apache.calcite.util.BuiltInMethod; -import org.apache.flink.table.planner.plan.schema.TableSourceTable; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; +import java.util.Map; import java.util.Set; +import java.util.function.Function; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Modified based on calcite's source code org.apache.calcite.rel.metadata.RelMdColumnOrigins * - *

              Modification point: 1. Support lookup join, add method getColumnOrigins(Snapshot - * rel,RelMetadataQuery mq, int iOutputColumn) 2. Support watermark, add method - * getColumnOrigins(SingleRel rel,RelMetadataQuery mq, int iOutputColumn) 3. Support table function, - * add method getColumnOrigins(Correlate rel, RelMetadataQuery mq, int iOutputColumn) 4. Support - * field AS LOCALTIMESTAMP, modify method getColumnOrigins(Calc rel, RelMetadataQuery mq, int - * iOutputColumn) 5. Support CEP, add method getColumnOrigins(Match rel, RelMetadataQuery mq, int - * iOutputColumn) 6. Support ROW_NUMBER(), add method getColumnOrigins(Window rel, RelMetadataQuery - * mq, int iOutputColumn)* + *

Modification point:
+ *   1. Support lookup join, add method getColumnOrigins(Snapshot rel, RelMetadataQuery mq, int iOutputColumn)
+ *   2. Support watermark, add method getColumnOrigins(SingleRel rel, RelMetadataQuery mq, int iOutputColumn)
+ *   3. Support table function, add method getColumnOrigins(Correlate rel, RelMetadataQuery mq, int iOutputColumn)
+ *   4. Support CEP, add method getColumnOrigins(Match rel, RelMetadataQuery mq, int iOutputColumn)
+ *   5. Support transform, add method createDerivedColumnOrigins(Set inputSet, String transform, boolean originTransform) and related code (a rough sketch of the placeholder substitution follows this list)
+ *   6. Support field AS LOCALTIMESTAMP, modify method getColumnOrigins(Project rel, RelMetadataQuery mq, int iOutputColumn)
+ *   7. Support PROCTIME() as the first field, add method computeIndexWithOffset, used by getColumnOrigins(Project rel, RelMetadataQuery mq, int iOutputColumn)
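Item 5 is the core of the new column-level transform tracking: createDerivedColumnOrigins records the projecting expression, and computeTransform rewrites its $N placeholders with readable source column names (optimizeSourceColumnSet shortens those names to the bare field when catalog, database and table coincide). As a rough standalone illustration of the placeholder substitution only, under the simplifying assumption that operands and source columns line up one to one, an expression such as ||($0, $1) over columns a and c becomes ||(a, c), which is the transform string the new LineageContextTest expects:

    import java.util.Arrays;
    import java.util.LinkedHashSet;
    import java.util.List;
    import java.util.Set;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class TransformDemo {
        // Simplified stand-in for the $N substitution done in RelMdColumnOrigins#computeTransform.
        static String substitute(String transform, List<String> sourceColumns) {
            Matcher matcher = Pattern.compile("\\$\\d+").matcher(transform);
            Set<String> operands = new LinkedHashSet<>();
            while (matcher.find()) {
                operands.add(matcher.group());
            }
            int i = 0;
            for (String operand : operands) {
                transform = transform.replace(operand, sourceColumns.get(i++));
            }
            return transform;
        }

        public static void main(String[] args) {
            // Prints ||(a, c), matching the expected lineage transform in the new tests.
            System.out.println(substitute("||($0, $1)", Arrays.asList("a", "c")));
        }
    }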
                  * - * @description: RelMdColumnOrigins supplies a default implementation of {@link - * RelMetadataQuery#getColumnOrigins} for the standard logical algebra. - * @version: 1.0.0 + * @description: RelMdColumnOrigins supplies a default implementation of {@link RelMetadataQuery#getColumnOrigins} for the standard logical algebra. + * @author: HamaWhite */ public class RelMdColumnOrigins implements MetadataHandler { + private static final Logger LOG = LoggerFactory.getLogger(RelMdColumnOrigins.class); + + public static final String DELIMITER = "."; + public static final RelMetadataProvider SOURCE = ReflectiveRelMetadataProvider.reflectiveSource( BuiltInMethod.COLUMN_ORIGIN.method, new RelMdColumnOrigins()); @@ -98,10 +112,10 @@ public Set getColumnOrigins(Aggregate rel, RelMetadataQuery mq, // Aggregate columns are derived from input columns AggregateCall call = rel.getAggCallList().get(iOutputColumn - rel.getGroupCount()); - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); for (Integer iInput : call.getArgList()) { Set inputSet = mq.getColumnOrigins(rel.getInput(), iInput); - inputSet = createDerivedColumnOrigins(inputSet); + inputSet = createDerivedColumnOrigins(inputSet, call.toString(), true); if (inputSet != null) { set.addAll(inputSet); } @@ -132,7 +146,9 @@ public Set getColumnOrigins(Join rel, RelMetadataQuery mq, int return set; } - /** Support the field blood relationship of table function */ + /** + * Support the field blood relationship of table function + */ public Set getColumnOrigins(Correlate rel, RelMetadataQuery mq, int iOutputColumn) { List leftFieldList = rel.getLeft().getRowType().getFieldList(); @@ -142,68 +158,126 @@ public Set getColumnOrigins(Correlate rel, RelMetadataQuery mq, if (iOutputColumn < nLeftColumns) { set = mq.getColumnOrigins(rel.getLeft(), iOutputColumn); } else { - // get the field name of the left table configured in the Table Function on the right - TableFunctionScan tableFunctionScan = (TableFunctionScan) rel.getRight(); - RexCall rexCall = (RexCall) tableFunctionScan.getCall(); - // support only one field in table function - RexFieldAccess rexFieldAccess = (RexFieldAccess) rexCall.operands.get(0); - String fieldName = rexFieldAccess.getField().getName(); - - int leftFieldIndex = 0; - for (int i = 0; i < nLeftColumns; i++) { - if (leftFieldList.get(i).getName().equalsIgnoreCase(fieldName)) { - leftFieldIndex = i; - break; + if (rel.getRight() instanceof TableFunctionScan) { + // get the field name of the left table configured in the Table Function on the right + TableFunctionScan tableFunctionScan = (TableFunctionScan) rel.getRight(); + RexCall rexCall = (RexCall) tableFunctionScan.getCall(); + // support only one field in table function + RexFieldAccess rexFieldAccess = + (RexFieldAccess) rexCall.getOperands().get(0); + String fieldName = rexFieldAccess.getField().getName(); + + int leftFieldIndex = 0; + for (int i = 0; i < nLeftColumns; i++) { + if (leftFieldList.get(i).getName().equalsIgnoreCase(fieldName)) { + leftFieldIndex = i; + break; + } } + /** + * Get the fields from the left table, don't go to + * getColumnOrigins(TableFunctionScan rel,RelMetadataQuery mq, int iOutputColumn), + * otherwise the return is null, and the UDTF field origin cannot be parsed + */ + set = mq.getColumnOrigins(rel.getLeft(), leftFieldIndex); + + // process transform for udtf + String transform = rexCall.toString().replace(rexFieldAccess.toString(), fieldName) + + DELIMITER + + 
tableFunctionScan.getRowType().getFieldNames().get(iOutputColumn - nLeftColumns); + set = createDerivedColumnOrigins(set, transform, false); + } else { + set = mq.getColumnOrigins(rel.getRight(), iOutputColumn - nLeftColumns); } - /** - * Get the fields from the left table, don't go to getColumnOrigins(TableFunctionScan - * rel,RelMetadataQuery mq, int iOutputColumn), otherwise the return is null, and the - * UDTF field origin cannot be parsed - */ - set = mq.getColumnOrigins(rel.getLeft(), leftFieldIndex); } return set; } public Set getColumnOrigins(SetOp rel, RelMetadataQuery mq, int iOutputColumn) { - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); for (RelNode input : rel.getInputs()) { Set inputSet = mq.getColumnOrigins(input, iOutputColumn); if (inputSet == null) { - return null; + return Collections.emptySet(); } set.addAll(inputSet); } return set; } - /** Support the field blood relationship of lookup join */ + /** + * Support the field blood relationship of lookup join + */ public Set getColumnOrigins(Snapshot rel, RelMetadataQuery mq, int iOutputColumn) { return mq.getColumnOrigins(rel.getInput(), iOutputColumn); } - /** Support the field blood relationship of watermark */ + /** + * Support the field blood relationship of watermark + */ public Set getColumnOrigins(SingleRel rel, RelMetadataQuery mq, int iOutputColumn) { return mq.getColumnOrigins(rel.getInput(), iOutputColumn); } /** - * Support field blood relationship of CEP. The first column is the field after PARTITION BY, - * and the other columns come from the measures in Match + * Support for new fields in the source table similar to those created with the LOCALTIMESTAMP function + */ + public Set getColumnOrigins(Project rel, final RelMetadataQuery mq, int iOutputColumn) { + final RelNode input = rel.getInput(); + RexNode rexNode = rel.getProjects().get(iOutputColumn); + + if (rexNode instanceof RexInputRef) { + // Direct reference: no derivation added. + RexInputRef inputRef = (RexInputRef) rexNode; + int index = inputRef.getIndex(); + if (input instanceof TableScan) { + index = computeIndexWithOffset(rel.getProjects(), inputRef.getIndex(), iOutputColumn); + } + return mq.getColumnOrigins(input, index); + } else if (input instanceof TableScan + && rexNode.getClass().equals(RexCall.class) + && ((RexCall) rexNode).getOperands().isEmpty()) { + return mq.getColumnOrigins(input, iOutputColumn); + } + // Anything else is a derivation, possibly from multiple columns. + final Set set = getMultipleColumns(rexNode, input, mq); + return createDerivedColumnOrigins(set, rexNode.toString(), true); + } + + private int computeIndexWithOffset(List projects, int baseIndex, int iOutputColumn) { + int offset = 0; + for (int index = 0; index < iOutputColumn; index++) { + RexNode rexNode = projects.get(index); + if ((rexNode.getClass().equals(RexCall.class) + && ((RexCall) rexNode).getOperands().isEmpty())) { + offset += 1; + } + } + return baseIndex + offset; + } + + /** + * Support field blood relationship of CEP. 
+ * The first column is the field after PARTITION BY, and the other columns come from the measures in Match */ public Set getColumnOrigins(Match rel, RelMetadataQuery mq, int iOutputColumn) { - if (iOutputColumn == 0) { + int orderCount = rel.getOrderKeys().getKeys().size(); + + if (iOutputColumn < orderCount) { return mq.getColumnOrigins(rel.getInput(), iOutputColumn); } final RelNode input = rel.getInput(); - RexNode rexNode = rel.getMeasures().values().asList().get(iOutputColumn - 1); + RexNode rexNode = rel.getMeasures().values().asList().get(iOutputColumn - orderCount); RexPatternFieldRef rexPatternFieldRef = searchRexPatternFieldRef(rexNode); if (rexPatternFieldRef != null) { - return mq.getColumnOrigins(input, rexPatternFieldRef.getIndex()); + final Set set = mq.getColumnOrigins(input, rexPatternFieldRef.getIndex()); + String originTransform = rexNode instanceof RexCall + ? ((RexCall) rexNode).getOperands().get(0).toString() + : null; + return createDerivedColumnOrigins(set, originTransform, true); } - return null; + return Collections.emptySet(); } private RexPatternFieldRef searchRexPatternFieldRef(RexNode rexNode) { @@ -219,46 +293,6 @@ private RexPatternFieldRef searchRexPatternFieldRef(RexNode rexNode) { return null; } - /** Support the field blood relationship of ROW_NUMBER() */ - public Set getColumnOrigins(Window rel, RelMetadataQuery mq, int iOutputColumn) { - final RelNode input = rel.getInput(); - /** - * Haven't found a good way to judge whether the field comes from window, for the time - * being, first judge by parsing the string - */ - String fieldName = rel.getRowType().getFieldNames().get(iOutputColumn); - // for example: "w1$o0" - if (fieldName.startsWith("w") && fieldName.contains("$")) { - int groupIndex = Integer.parseInt(fieldName.substring(1, fieldName.indexOf("$"))); - final Set set = new LinkedHashSet<>(); - if (!rel.groups.isEmpty()) { - Window.Group group = rel.groups.get(groupIndex); - // process partition by keys - group.keys.asList().forEach(index -> set.addAll(mq.getColumnOrigins(input, index))); - // process order by keys - group.orderKeys - .getFieldCollations() - .forEach(e -> set.addAll(mq.getColumnOrigins(input, e.getFieldIndex()))); - } - return set; - } - return mq.getColumnOrigins(rel.getInput(), iOutputColumn); - } - - public Set getColumnOrigins(Project rel, final RelMetadataQuery mq, int iOutputColumn) { - final RelNode input = rel.getInput(); - RexNode rexNode = rel.getProjects().get(iOutputColumn); - - if (rexNode instanceof RexInputRef) { - // Direct reference: no derivation added. - RexInputRef inputRef = (RexInputRef) rexNode; - return mq.getColumnOrigins(input, inputRef.getIndex()); - } - // Anything else is a derivation, possibly from multiple columns. - final Set set = getMultipleColumns(rexNode, input, mq); - return createDerivedColumnOrigins(set); - } - public Set getColumnOrigins(Calc rel, final RelMetadataQuery mq, int iOutputColumn) { final RelNode input = rel.getInput(); final RexShuttle rexShuttle = new RexShuttle() { @@ -277,30 +311,6 @@ public RexNode visitLocalRef(RexLocalRef localRef) { // Direct reference: no derivation added. 
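One more note on the Project handler introduced a couple of hunks above (items 6 and 7 in the modification list): when the Project sits directly on a TableScan, the patch shifts a direct input reference by the number of zero-operand calls, such as PROCTIME(), that appear earlier in the projection, apparently so that computed columns occupying those leading slots do not throw off the mapping to physical source columns. A much-simplified stand-in for that arithmetic, using booleans instead of RexNodes (illustrative only):

    import java.util.Arrays;
    import java.util.List;

    public class OffsetDemo {
        // Mirrors the counting done in RelMdColumnOrigins#computeIndexWithOffset:
        // add one for every zero-operand call that precedes the output column.
        static int indexWithOffset(List<Boolean> isZeroOperandCall, int baseIndex, int iOutputColumn) {
            int offset = 0;
            for (int i = 0; i < iOutputColumn; i++) {
                if (isZeroOperandCall.get(i)) {
                    offset++;
                }
            }
            return baseIndex + offset;
        }

        public static void main(String[] args) {
            // Projection [PROCTIME(), a, b] over a scan of (a, b):
            List<Boolean> projects = Arrays.asList(true, false, false);
            // The reference produced for output column 1 is shifted past the PROCTIME() slot.
            System.out.println(indexWithOffset(projects, 0, 1)); // prints 1
        }
    }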
RexInputRef inputRef = (RexInputRef) rexNode; return mq.getColumnOrigins(input, inputRef.getIndex()); - } else if (rexNode instanceof RexCall && ((RexCall) rexNode).operands.isEmpty()) { - // support for new fields in the source table similar to those created with the - // LOCALTIMESTAMP function - TableSourceTable table = ((TableSourceTable) rel.getInput().getTable()); - if (table != null) { - String targetFieldName = rel.getProgram() - .getOutputRowType() - .getFieldList() - .get(iOutputColumn) - .getName(); - List fieldList = - table.contextResolvedTable().getResolvedSchema().getColumnNames(); - - int index = -1; - for (int i = 0; i < fieldList.size(); i++) { - if (fieldList.get(i).equalsIgnoreCase(targetFieldName)) { - index = i; - break; - } - } - if (index != -1) { - return Collections.singleton(new RelColumnOrigin(table, index, false)); - } - } } // Anything else is a derivation, possibly from multiple columns. final Set set = getMultipleColumns(rexNode, input, mq); @@ -324,14 +334,14 @@ public Set getColumnOrigins(Exchange rel, RelMetadataQuery mq, } public Set getColumnOrigins(TableFunctionScan rel, RelMetadataQuery mq, int iOutputColumn) { - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); Set mappings = rel.getColumnMappings(); if (mappings == null) { - if (rel.getInputs().size() > 0) { + if (!rel.getInputs().isEmpty()) { // This is a non-leaf transformation: say we don't // know about origins, because there are probably // columns below. - return null; + return Collections.emptySet(); } else { // This is a leaf transformation: say there are fer sure no // column origins. @@ -346,7 +356,7 @@ public Set getColumnOrigins(TableFunctionScan rel, RelMetadataQ final int column = mapping.iInputColumn; Set origins = mq.getColumnOrigins(input, column); if (origins == null) { - return null; + return Collections.emptySet(); } if (mapping.derived) { origins = createDerivedColumnOrigins(origins); @@ -357,18 +367,19 @@ public Set getColumnOrigins(TableFunctionScan rel, RelMetadataQ } // Catch-all rule when none of the others apply. + @SuppressWarnings("squid:S1172") public Set getColumnOrigins(RelNode rel, RelMetadataQuery mq, int iOutputColumn) { // NOTE jvs 28-Mar-2006: We may get this wrong for a physical table // expression which supports projections. In that case, // it's up to the plugin writer to override with the // correct information. - if (rel.getInputs().size() > 0) { + if (!rel.getInputs().isEmpty()) { // No generic logic available for non-leaf rels. - return null; + return Collections.emptySet(); } - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); RelOptTable table = rel.getTable(); if (table == null) { @@ -383,7 +394,7 @@ public Set getColumnOrigins(RelNode rel, RelMetadataQuery mq, i // names.) This detection assumes the table expression doesn't handle // rename as well. 
if (table.getRowType() != rel.getRowType()) { - return null; + return Collections.emptySet(); } set.add(new RelColumnOrigin(table, iOutputColumn, false)); @@ -392,9 +403,9 @@ public Set getColumnOrigins(RelNode rel, RelMetadataQuery mq, i private Set createDerivedColumnOrigins(Set inputSet) { if (inputSet == null) { - return null; + return Collections.emptySet(); } - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); for (RelColumnOrigin rco : inputSet) { RelColumnOrigin derived = new RelColumnOrigin(rco.getOriginTable(), rco.getOriginColumnOrdinal(), true); set.add(derived); @@ -402,10 +413,113 @@ private Set createDerivedColumnOrigins(Set inp return set; } + private Set createDerivedColumnOrigins( + Set inputSet, String transform, boolean originTransform) { + if (inputSet == null || inputSet.isEmpty()) { + return Collections.emptySet(); + } + final Set set = new LinkedHashSet<>(); + + String finalTransform = originTransform ? computeTransform(inputSet, transform) : transform; + for (RelColumnOrigin rco : inputSet) { + RelColumnOrigin derived = + new RelColumnOrigin(rco.getOriginTable(), rco.getOriginColumnOrdinal(), true, finalTransform); + set.add(derived); + } + return set; + } + + /** + * Replace the variable at the beginning of $ in input with the real field information + */ + private String computeTransform(Set inputSet, String transform) { + LOG.debug("origin transform: {}", transform); + Pattern pattern = Pattern.compile("\\$\\d+"); + Matcher matcher = pattern.matcher(transform); + + Set operandSet = new LinkedHashSet<>(); + while (matcher.find()) { + operandSet.add(matcher.group()); + } + + if (operandSet.isEmpty()) { + LOG.info("operandSet is empty"); + return null; + } + if (inputSet.size() != operandSet.size()) { + LOG.warn( + "The number [{}] of fields in the source tables are not equal to operands [{}]", + inputSet.size(), + operandSet.size()); + return null; + } + + Map sourceColumnMap = new HashMap<>(); + Iterator iterator = optimizeSourceColumnSet(inputSet).iterator(); + operandSet.forEach(e -> sourceColumnMap.put(e, iterator.next())); + LOG.debug("sourceColumnMap: {}", sourceColumnMap); + + matcher = pattern.matcher(transform); + String temp; + while (matcher.find()) { + temp = matcher.group(); + transform = transform.replace(temp, sourceColumnMap.get(temp)); + } + + // temporary special treatment + transform = transform.replace("_UTF-16LE", ""); + LOG.debug("transform: {}", transform); + return transform; + } + + /** + * Increase the readability of transform. + * if catalog, database and table are the same, return field. + * If the catalog and database are the same, return the table and field. + * If the catalog is the same, return the database, table, field. + * Otherwise, return all + */ + private Set optimizeSourceColumnSet(Set inputSet) { + Set catalogSet = new HashSet<>(); + Set databaseSet = new HashSet<>(); + Set tableSet = new HashSet<>(); + Set> qualifiedSet = new LinkedHashSet<>(); + for (RelColumnOrigin rco : inputSet) { + RelOptTable originTable = rco.getOriginTable(); + List qualifiedName = originTable.getQualifiedName(); + + // catalog,database,table,field + List qualifiedList = new ArrayList<>(qualifiedName); + catalogSet.add(qualifiedName.get(0)); + databaseSet.add(qualifiedName.get(1)); + tableSet.add(qualifiedName.get(2)); + + String field = rco.getTransform() != null + ? 
rco.getTransform() + : originTable.getRowType().getFieldNames().get(rco.getOriginColumnOrdinal()); + qualifiedList.add(field); + qualifiedSet.add(qualifiedList); + } + if (catalogSet.size() == 1 && databaseSet.size() == 1 && tableSet.size() == 1) { + return optimizeName(qualifiedSet, e -> e.get(3)); + } else if (catalogSet.size() == 1 && databaseSet.size() == 1) { + return optimizeName(qualifiedSet, e -> String.join(DELIMITER, e.subList(2, 4))); + } else if (catalogSet.size() == 1) { + return optimizeName(qualifiedSet, e -> String.join(DELIMITER, e.subList(1, 4))); + } else { + return optimizeName(qualifiedSet, e -> String.join(DELIMITER, e)); + } + } + + private Set optimizeName(Set> qualifiedSet, Function, String> mapper) { + return qualifiedSet.stream().map(mapper).collect(Collectors.toSet()); + } + private Set getMultipleColumns(RexNode rexNode, RelNode input, final RelMetadataQuery mq) { - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); final RexVisitor visitor = new RexVisitorImpl(true) { + @Override public Void visitInputRef(RexInputRef inputRef) { Set inputSet = mq.getColumnOrigins(input, inputRef.getIndex()); if (inputSet != null) { diff --git a/dinky-client/dinky-client-1.17/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java b/dinky-client/dinky-client-1.17/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java index 433de1d248..6cc8e992ba 100644 --- a/dinky-client/dinky-client-1.17/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java +++ b/dinky-client/dinky-client-1.17/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java @@ -23,7 +23,6 @@ import org.dinky.context.DinkyClassLoaderContextHolder; import org.dinky.data.model.LineageRel; import org.dinky.data.result.SqlExplainResult; -import org.dinky.utils.FlinkStreamProgramWithoutPhysical; import org.dinky.utils.LineageContext; import org.apache.flink.api.dag.Transformation; @@ -51,7 +50,6 @@ import org.apache.flink.table.operations.command.ResetOperation; import org.apache.flink.table.operations.command.SetOperation; import org.apache.flink.table.operations.ddl.CreateTableOperation; -import org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram; import org.apache.flink.types.Row; import java.util.ArrayList; @@ -79,13 +77,10 @@ public class CustomTableEnvironmentImpl extends AbstractCustomTableEnvironment { private static final Logger log = LoggerFactory.getLogger(CustomTableEnvironmentImpl.class); - private final FlinkChainedProgram flinkChainedProgram; private static final ObjectMapper mapper = new ObjectMapper(); public CustomTableEnvironmentImpl(StreamTableEnvironment streamTableEnvironment) { super(streamTableEnvironment); - this.flinkChainedProgram = FlinkStreamProgramWithoutPhysical.buildProgram( - (Configuration) getStreamExecutionEnvironment().getConfiguration()); } public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment) { @@ -256,8 +251,7 @@ private void setConfiguration(StreamExecutionEnvironment environment, Map getLineage(String statement) { - LineageContext lineageContext = - new LineageContext(flinkChainedProgram, (TableEnvironmentImpl) streamTableEnvironment); + LineageContext lineageContext = new LineageContext((TableEnvironmentImpl) streamTableEnvironment); return lineageContext.getLineage(statement); } diff --git a/dinky-client/dinky-client-1.17/src/main/java/org/dinky/utils/FlinkStreamProgramWithoutPhysical.java 
b/dinky-client/dinky-client-1.17/src/main/java/org/dinky/utils/FlinkStreamProgramWithoutPhysical.java deleted file mode 100644 index bf4808939d..0000000000 --- a/dinky-client/dinky-client-1.17/src/main/java/org/dinky/utils/FlinkStreamProgramWithoutPhysical.java +++ /dev/null @@ -1,246 +0,0 @@ -/* - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package org.dinky.utils; - -import org.apache.calcite.plan.Convention; -import org.apache.calcite.plan.hep.HepMatchOrder; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.table.api.config.OptimizerConfigOptions; -import org.apache.flink.table.planner.plan.nodes.FlinkConventions; -import org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram; -import org.apache.flink.table.planner.plan.optimize.program.FlinkDecorrelateProgram; -import org.apache.flink.table.planner.plan.optimize.program.FlinkGroupProgramBuilder; -import org.apache.flink.table.planner.plan.optimize.program.FlinkHepRuleSetProgramBuilder; -import org.apache.flink.table.planner.plan.optimize.program.FlinkVolcanoProgramBuilder; -import org.apache.flink.table.planner.plan.optimize.program.HEP_RULES_EXECUTION_TYPE; -import org.apache.flink.table.planner.plan.rules.FlinkStreamRuleSets; - -/** - * FlinkStreamProgramWithoutPhysical - * - * @since 2022/11/22 - */ -public class FlinkStreamProgramWithoutPhysical { - - private static final String SUBQUERY_REWRITE = "subquery_rewrite"; - private static final String TEMPORAL_JOIN_REWRITE = "temporal_join_rewrite"; - private static final String DECORRELATE = "decorrelate"; - private static final String DEFAULT_REWRITE = "default_rewrite"; - private static final String PREDICATE_PUSHDOWN = "predicate_pushdown"; - private static final String JOIN_REORDER = "join_reorder"; - private static final String PROJECT_REWRITE = "project_rewrite"; - private static final String LOGICAL = "logical"; - private static final String LOGICAL_REWRITE = "logical_rewrite"; - - public static FlinkChainedProgram buildProgram(Configuration config) { - FlinkChainedProgram chainedProgram = new FlinkChainedProgram(); - - // rewrite sub-queries to joins - chainedProgram.addLast( - SUBQUERY_REWRITE, - FlinkGroupProgramBuilder.newBuilder() - // rewrite QueryOperationCatalogViewTable before rewriting sub-queries - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.TABLE_REF_RULES()) - .build(), - "convert table references before rewriting sub-queries to" + " semi-join") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - 
.setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.SEMI_JOIN_RULES()) - .build(), - "rewrite sub-queries to semi-join") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_COLLECTION()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.TABLE_SUBQUERY_RULES()) - .build(), - "sub-queries remove") - // convert RelOptTableImpl (which exists in SubQuery before) to - // FlinkRelOptTable - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.TABLE_REF_RULES()) - .build(), - "convert table references after sub-queries removed") - .build()); - - // rewrite special temporal join plan - chainedProgram.addLast( - TEMPORAL_JOIN_REWRITE, - FlinkGroupProgramBuilder.newBuilder() - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.EXPAND_PLAN_RULES()) - .build(), - "convert correlate to temporal table join") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.POST_EXPAND_CLEAN_UP_RULES()) - .build(), - "convert enumerable table scan") - .build()); - - // query decorrelation - chainedProgram.addLast( - DECORRELATE, - FlinkGroupProgramBuilder.newBuilder() - // rewrite before decorrelation - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.PRE_DECORRELATION_RULES()) - .build(), - "pre-rewrite before decorrelation") - .addProgram(new FlinkDecorrelateProgram(), "") - .build()); - - // default rewrite, includes: predicate simplification, expression reduction, window - // properties rewrite, etc. 
- chainedProgram.addLast( - DEFAULT_REWRITE, - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.DEFAULT_REWRITE_RULES()) - .build()); - - // rule based optimization: push down predicate(s) in where clause, so it only needs to read - // the required data - chainedProgram.addLast( - PREDICATE_PUSHDOWN, - FlinkGroupProgramBuilder.newBuilder() - .addProgram( - FlinkGroupProgramBuilder.newBuilder() - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType( - HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.JOIN_PREDICATE_REWRITE_RULES()) - .build(), - "join predicate rewrite") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType( - HEP_RULES_EXECUTION_TYPE.RULE_COLLECTION()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.FILTER_PREPARE_RULES()) - .build(), - "filter rules") - .setIterations(5) - .build(), - "predicate rewrite") - .addProgram( - // PUSH_PARTITION_DOWN_RULES should always be in front of - // PUSH_FILTER_DOWN_RULES - // to prevent PUSH_FILTER_DOWN_RULES from consuming the predicates - // in partitions - FlinkGroupProgramBuilder.newBuilder() - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType( - HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.PUSH_PARTITION_DOWN_RULES()) - .build(), - "push down partitions into table scan") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType( - HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.PUSH_FILTER_DOWN_RULES()) - .build(), - "push down filters into table scan") - .build(), - "push predicate into table scan") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.PRUNE_EMPTY_RULES()) - .build(), - "prune empty after predicate push down") - .build()); - - // join reorder - if (config.getBoolean(OptimizerConfigOptions.TABLE_OPTIMIZER_JOIN_REORDER_ENABLED)) { - chainedProgram.addLast( - JOIN_REORDER, - FlinkGroupProgramBuilder.newBuilder() - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_COLLECTION()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.JOIN_REORDER_PREPARE_RULES()) - .build(), - "merge join into MultiJoin") - .addProgram( - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.JOIN_REORDER_RULES()) - .build(), - "do join reorder") - .build()); - } - - // project rewrite - chainedProgram.addLast( - PROJECT_REWRITE, - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_COLLECTION()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.PROJECT_RULES()) - .build()); - - // optimize the logical plan - chainedProgram.addLast( - LOGICAL, - FlinkVolcanoProgramBuilder.newBuilder() - .add(FlinkStreamRuleSets.LOGICAL_OPT_RULES()) - .setRequiredOutputTraits(new Convention.Impl[] {FlinkConventions.LOGICAL()}) - .build()); - - // 
logical rewrite - chainedProgram.addLast( - LOGICAL_REWRITE, - FlinkHepRuleSetProgramBuilder.newBuilder() - .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE()) - .setHepMatchOrder(HepMatchOrder.BOTTOM_UP) - .add(FlinkStreamRuleSets.LOGICAL_REWRITE()) - .build()); - - return chainedProgram; - } -} diff --git a/dinky-client/dinky-client-1.17/src/main/java/org/dinky/utils/LineageContext.java b/dinky-client/dinky-client-1.17/src/main/java/org/dinky/utils/LineageContext.java index 2c31236b97..d707ade42a 100644 --- a/dinky-client/dinky-client-1.17/src/main/java/org/dinky/utils/LineageContext.java +++ b/dinky-client/dinky-client-1.17/src/main/java/org/dinky/utils/LineageContext.java @@ -27,23 +27,13 @@ import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.commons.collections.CollectionUtils; import org.apache.flink.api.java.tuple.Tuple2; -import org.apache.flink.table.api.TableConfig; import org.apache.flink.table.api.TableException; import org.apache.flink.table.api.ValidationException; import org.apache.flink.table.api.internal.TableEnvironmentImpl; -import org.apache.flink.table.catalog.CatalogManager; -import org.apache.flink.table.catalog.FunctionCatalog; -import org.apache.flink.table.module.ModuleManager; import org.apache.flink.table.operations.Operation; import org.apache.flink.table.operations.SinkModifyOperation; -import org.apache.flink.table.planner.calcite.FlinkRelBuilder; -import org.apache.flink.table.planner.calcite.RexFactory; -import org.apache.flink.table.planner.delegation.PlannerBase; import org.apache.flink.table.planner.operations.PlannerQueryOperation; -import org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram; -import org.apache.flink.table.planner.plan.optimize.program.StreamOptimizeContext; import org.apache.flink.table.planner.plan.schema.TableSourceTable; -import org.apache.flink.table.planner.plan.trait.MiniBatchInterval; import java.util.ArrayList; import java.util.List; @@ -56,11 +46,9 @@ */ public class LineageContext { - private final FlinkChainedProgram flinkChainedProgram; private final TableEnvironmentImpl tableEnv; - public LineageContext(FlinkChainedProgram flinkChainedProgram, TableEnvironmentImpl tableEnv) { - this.flinkChainedProgram = flinkChainedProgram; + public LineageContext(TableEnvironmentImpl tableEnv) { this.tableEnv = tableEnv; } @@ -70,11 +58,8 @@ public List getLineage(String statement) { String sinkTable = parsed.getField(0); RelNode oriRelNode = parsed.getField(1); - // 2. Optimize original relNode to generate Optimized Logical Plan - RelNode optRelNode = optimize(oriRelNode); - - // 3. Build lineage based from RelMetadataQuery - return buildFiledLineageResult(sinkTable, optRelNode); + // 2. Build lineage based from RelMetadataQuery + return buildFiledLineageResult(sinkTable, oriRelNode); } private Tuple2 parseStatement(String sql) { @@ -96,71 +81,6 @@ private Tuple2 parseStatement(String sql) { } } - /** Calling each program's optimize method in sequence. 
*/ - private RelNode optimize(RelNode relNode) { - return flinkChainedProgram.optimize(relNode, new StreamOptimizeContext() { - - @Override - public boolean isBatchMode() { - return false; - } - - @Override - public TableConfig getTableConfig() { - return tableEnv.getConfig(); - } - - @Override - public FunctionCatalog getFunctionCatalog() { - return getPlanner().getFlinkContext().getFunctionCatalog(); - } - - @Override - public CatalogManager getCatalogManager() { - return tableEnv.getCatalogManager(); - } - - @Override - public ModuleManager getModuleManager() { - return getPlanner().getFlinkContext().getModuleManager(); - } - - @Override - public RexFactory getRexFactory() { - return getPlanner().getFlinkContext().getRexFactory(); - } - - @Override - public FlinkRelBuilder getFlinkRelBuilder() { - return getPlanner().createRelBuilder(); - } - - @Override - public boolean isUpdateBeforeRequired() { - return false; - } - - @Override - public MiniBatchInterval getMiniBatchInterval() { - return MiniBatchInterval.NONE; - } - - @Override - public boolean needFinalTimeIndicatorConversion() { - return true; - } - - @Override - public ClassLoader getClassLoader() { - return getPlanner().getFlinkContext().getClassLoader(); - } - - private PlannerBase getPlanner() { - return (PlannerBase) tableEnv.getPlanner(); - } - }); - } - /** Check the size of query and sink fields match */ private void validateSchema(String sinkTable, RelNode relNode, List sinkFieldList) { List queryFieldList = relNode.getRowType().getFieldNames(); @@ -204,7 +124,8 @@ private List buildFiledLineageResult(String sinkTable, RelNode optRe String sourceColumn = fieldNames.get(ordinal); // add record - resultList.add(LineageRel.build(sourceTable, sourceColumn, sinkTable, targetColumn)); + resultList.add(LineageRel.build( + sourceTable, sourceColumn, sinkTable, targetColumn, relColumnOrigin.getTransform())); } } } diff --git a/dinky-client/dinky-client-1.17/src/test/java/org/dinky/utils/LineageContextTest.java b/dinky-client/dinky-client-1.17/src/test/java/org/dinky/utils/LineageContextTest.java new file mode 100644 index 0000000000..fa22d6eec6 --- /dev/null +++ b/dinky-client/dinky-client-1.17/src/test/java/org/dinky/utils/LineageContextTest.java @@ -0,0 +1,112 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.dinky.utils; + +import static org.junit.Assert.assertEquals; + +import org.dinky.data.model.LineageRel; + +import org.apache.flink.configuration.Configuration; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.table.api.EnvironmentSettings; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; +import org.apache.flink.table.api.internal.TableEnvironmentImpl; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +/** + * @description: LineageContextTest + * @author: HamaWhite + */ +public class LineageContextTest { + + private static TableEnvironmentImpl tableEnv; + private static LineageContext context; + + @BeforeClass + public static void setUp() { + StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment(new Configuration()); + + EnvironmentSettings settings = + EnvironmentSettings.newInstance().inStreamingMode().build(); + tableEnv = (TableEnvironmentImpl) StreamTableEnvironment.create(env, settings); + + context = new LineageContext(tableEnv); + } + + @Before + public void init() { + // create table ST + tableEnv.executeSql("DROP TABLE IF EXISTS ST"); + tableEnv.executeSql("CREATE TABLE ST ( " + " a STRING ," + + " b STRING ," + + " c STRING " + + ") WITH ( " + + " 'connector' = 'datagen' ," + + " 'rows-per-second' = '1' " + + ")"); + + // create table TT + tableEnv.executeSql("DROP TABLE IF EXISTS TT"); + tableEnv.executeSql("CREATE TABLE TT ( " + " A STRING ," + + " B STRING " + + ") WITH ( " + + " 'connector' = 'print' " + + ")"); + } + + @Test + public void testGetLineage() { + List actualList = context.getLineage("INSERT INTO TT select a||c A ,b||c B from ST"); + String[][] expectedArray = { + {"ST", "a", "TT", "A", "||(a, c)"}, + {"ST", "c", "TT", "A", "||(a, c)"}, + {"ST", "b", "TT", "B", "||(b, c)"}, + {"ST", "c", "TT", "B", "||(b, c)"} + }; + + List expectedList = buildResult(expectedArray); + assertEquals(expectedList, actualList); + } + + private List buildResult(String[][] expectedArray) { + return Stream.of(expectedArray) + .map(e -> { + String transform = e.length == 5 ? e[4] : null; + return new LineageRel( + "default_catalog", + "default_database", + e[0], + e[1], + "default_catalog", + "default_database", + e[2], + e[3], + transform); + }) + .collect(Collectors.toList()); + } +} diff --git a/dinky-client/dinky-client-1.18/pom.xml b/dinky-client/dinky-client-1.18/pom.xml index 59d90e6a31..79c8047f03 100644 --- a/dinky-client/dinky-client-1.18/pom.xml +++ b/dinky-client/dinky-client-1.18/pom.xml @@ -41,6 +41,11 @@ com.sun.xml.bind jaxb-core + + junit + junit + provided + diff --git a/dinky-client/dinky-client-1.18/src/main/java/org/apache/calcite/rel/metadata/RelColumnOrigin.java b/dinky-client/dinky-client-1.18/src/main/java/org/apache/calcite/rel/metadata/RelColumnOrigin.java new file mode 100644 index 0000000000..5fc8dc24cb --- /dev/null +++ b/dinky-client/dinky-client-1.18/src/main/java/org/apache/calcite/rel/metadata/RelColumnOrigin.java @@ -0,0 +1,116 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.apache.calcite.rel.metadata; + +import org.apache.calcite.plan.RelOptTable; + +/** + * Modified based on calcite's source code org.apache.calcite.rel.metadata.RelColumnOrigin + *

+ * Modification point:
+ *   1. add transform field and related code.
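+ *
+ * Illustrative example, based on the expectations in the accompanying LineageContextTest: for a
+ * projection such as "a || c AS A", each origin of sink column A keeps originTable and
+ * iOriginColumn pointing at a source column (a or c) and is marked derived, while transform
+ * records the producing expression, e.g. "||(a, c)".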
                  + * + * @description: RelColumnOrigin is a data structure describing one of the origins of an + * output column produced by a relational expression. + * @author: HamaWhite + */ +public class RelColumnOrigin { + // ~ Instance fields -------------------------------------------------------- + + private final RelOptTable originTable; + + private final int iOriginColumn; + + private final boolean isDerived; + + /** + * Stores the expression for data conversion, + * which source table fields are transformed by which expression the target field + */ + private String transform; + + // ~ Constructors ----------------------------------------------------------- + + public RelColumnOrigin(RelOptTable originTable, int iOriginColumn, boolean isDerived) { + this.originTable = originTable; + this.iOriginColumn = iOriginColumn; + this.isDerived = isDerived; + } + + public RelColumnOrigin(RelOptTable originTable, int iOriginColumn, boolean isDerived, String transform) { + this.originTable = originTable; + this.iOriginColumn = iOriginColumn; + this.isDerived = isDerived; + this.transform = transform; + } + + // ~ Methods ---------------------------------------------------------------- + + /** + * Returns table of origin. + */ + public RelOptTable getOriginTable() { + return originTable; + } + + /** + * Returns the 0-based index of column in origin table; whether this ordinal + * is flattened or unflattened depends on whether UDT flattening has already + * been performed on the relational expression which produced this + * description. + */ + public int getOriginColumnOrdinal() { + return iOriginColumn; + } + + /** + * Consider the query select a+b as c, d as e from t. The + * output column c has two origins (a and b), both of them derived. The + * output column d as one origin (c), which is not derived. + * + * @return false if value taken directly from column in origin table; true + * otherwise + */ + public boolean isDerived() { + return isDerived; + } + + public String getTransform() { + return transform; + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof RelColumnOrigin)) { + return false; + } + RelColumnOrigin other = (RelColumnOrigin) obj; + return originTable.getQualifiedName().equals(other.originTable.getQualifiedName()) + && (iOriginColumn == other.iOriginColumn) + && (isDerived == other.isDerived); + } + + @Override + public int hashCode() { + return originTable.getQualifiedName().hashCode() + iOriginColumn + (isDerived ? 
313 : 0); + } +} diff --git a/dinky-client/dinky-client-1.18/src/main/java/org/apache/calcite/rel/metadata/RelMdColumnOrigins.java b/dinky-client/dinky-client-1.18/src/main/java/org/apache/calcite/rel/metadata/RelMdColumnOrigins.java index 6bdad4d186..5c8aae002a 100644 --- a/dinky-client/dinky-client-1.18/src/main/java/org/apache/calcite/rel/metadata/RelMdColumnOrigins.java +++ b/dinky-client/dinky-client-1.18/src/main/java/org/apache/calcite/rel/metadata/RelMdColumnOrigins.java @@ -36,7 +36,7 @@ import org.apache.calcite.rel.core.Sort; import org.apache.calcite.rel.core.TableFunctionScan; import org.apache.calcite.rel.core.TableModify; -import org.apache.calcite.rel.core.Window; +import org.apache.calcite.rel.core.TableScan; import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.rex.RexCall; import org.apache.calcite.rex.RexFieldAccess; @@ -48,33 +48,47 @@ import org.apache.calcite.rex.RexVisitor; import org.apache.calcite.rex.RexVisitorImpl; import org.apache.calcite.util.BuiltInMethod; -import org.apache.flink.table.planner.plan.schema.TableSourceTable; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; +import java.util.Map; import java.util.Set; +import java.util.function.Function; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Modified based on calcite's source code org.apache.calcite.rel.metadata.RelMdColumnOrigins * - *

                  Modification point: 1. Support lookup join, add method getColumnOrigins(Snapshot - * rel,RelMetadataQuery mq, int iOutputColumn) 2. Support watermark, add method - * getColumnOrigins(SingleRel rel,RelMetadataQuery mq, int iOutputColumn) 3. Support table function, - * add method getColumnOrigins(Correlate rel, RelMetadataQuery mq, int iOutputColumn) 4. Support - * field AS LOCALTIMESTAMP, modify method getColumnOrigins(Calc rel, RelMetadataQuery mq, int - * iOutputColumn) 5. Support CEP, add method getColumnOrigins(Match rel, RelMetadataQuery mq, int - * iOutputColumn) 6. Support ROW_NUMBER(), add method getColumnOrigins(Window rel, RelMetadataQuery - * mq, int iOutputColumn)* + *

+ * Modification point:
+ *   1. Support lookup join, add method getColumnOrigins(Snapshot rel, RelMetadataQuery mq, int iOutputColumn)
+ *   2. Support watermark, add method getColumnOrigins(SingleRel rel, RelMetadataQuery mq, int iOutputColumn)
+ *   3. Support table function, add method getColumnOrigins(Correlate rel, RelMetadataQuery mq, int iOutputColumn)
+ *   4. Support CEP, add method getColumnOrigins(Match rel, RelMetadataQuery mq, int iOutputColumn)
+ *   5. Support transform, add method createDerivedColumnOrigins(Set inputSet, String transform, boolean originTransform), and related code
+ *   6. Support field AS LOCALTIMESTAMP, modify method getColumnOrigins(Project rel, RelMetadataQuery mq, int iOutputColumn)
+ *   7. Support PROCTIME() as the first field, add method computeIndexWithOffset, used by getColumnOrigins(Project rel, RelMetadataQuery mq, int iOutputColumn)
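+ *
+ * Illustrative note for point 5, based on the accompanying LineageContextTest: the transform
+ * string starts from the textual form of the projected RexNode (e.g. "||($0, $1)"), and
+ * computeTransform replaces each $-placeholder with the resolved source column name, yielding
+ * "||(a, c)".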
                      * - * @description: RelMdColumnOrigins supplies a default implementation of {@link - * RelMetadataQuery#getColumnOrigins} for the standard logical algebra. - * @version: 1.0.0 + * @description: RelMdColumnOrigins supplies a default implementation of {@link RelMetadataQuery#getColumnOrigins} for the standard logical algebra. + * @author: HamaWhite */ public class RelMdColumnOrigins implements MetadataHandler { + private static final Logger LOG = LoggerFactory.getLogger(RelMdColumnOrigins.class); + + public static final String DELIMITER = "."; + public static final RelMetadataProvider SOURCE = ReflectiveRelMetadataProvider.reflectiveSource( BuiltInMethod.COLUMN_ORIGIN.method, new RelMdColumnOrigins()); @@ -98,10 +112,10 @@ public Set getColumnOrigins(Aggregate rel, RelMetadataQuery mq, // Aggregate columns are derived from input columns AggregateCall call = rel.getAggCallList().get(iOutputColumn - rel.getGroupCount()); - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); for (Integer iInput : call.getArgList()) { Set inputSet = mq.getColumnOrigins(rel.getInput(), iInput); - inputSet = createDerivedColumnOrigins(inputSet); + inputSet = createDerivedColumnOrigins(inputSet, call.toString(), true); if (inputSet != null) { set.addAll(inputSet); } @@ -132,7 +146,9 @@ public Set getColumnOrigins(Join rel, RelMetadataQuery mq, int return set; } - /** Support the field blood relationship of table function */ + /** + * Support the field blood relationship of table function + */ public Set getColumnOrigins(Correlate rel, RelMetadataQuery mq, int iOutputColumn) { List leftFieldList = rel.getLeft().getRowType().getFieldList(); @@ -142,68 +158,126 @@ public Set getColumnOrigins(Correlate rel, RelMetadataQuery mq, if (iOutputColumn < nLeftColumns) { set = mq.getColumnOrigins(rel.getLeft(), iOutputColumn); } else { - // get the field name of the left table configured in the Table Function on the right - TableFunctionScan tableFunctionScan = (TableFunctionScan) rel.getRight(); - RexCall rexCall = (RexCall) tableFunctionScan.getCall(); - // support only one field in table function - RexFieldAccess rexFieldAccess = (RexFieldAccess) rexCall.operands.get(0); - String fieldName = rexFieldAccess.getField().getName(); - - int leftFieldIndex = 0; - for (int i = 0; i < nLeftColumns; i++) { - if (leftFieldList.get(i).getName().equalsIgnoreCase(fieldName)) { - leftFieldIndex = i; - break; + if (rel.getRight() instanceof TableFunctionScan) { + // get the field name of the left table configured in the Table Function on the right + TableFunctionScan tableFunctionScan = (TableFunctionScan) rel.getRight(); + RexCall rexCall = (RexCall) tableFunctionScan.getCall(); + // support only one field in table function + RexFieldAccess rexFieldAccess = + (RexFieldAccess) rexCall.getOperands().get(0); + String fieldName = rexFieldAccess.getField().getName(); + + int leftFieldIndex = 0; + for (int i = 0; i < nLeftColumns; i++) { + if (leftFieldList.get(i).getName().equalsIgnoreCase(fieldName)) { + leftFieldIndex = i; + break; + } } + /** + * Get the fields from the left table, don't go to + * getColumnOrigins(TableFunctionScan rel,RelMetadataQuery mq, int iOutputColumn), + * otherwise the return is null, and the UDTF field origin cannot be parsed + */ + set = mq.getColumnOrigins(rel.getLeft(), leftFieldIndex); + + // process transform for udtf + String transform = rexCall.toString().replace(rexFieldAccess.toString(), fieldName) + + DELIMITER + + 
tableFunctionScan.getRowType().getFieldNames().get(iOutputColumn - nLeftColumns); + set = createDerivedColumnOrigins(set, transform, false); + } else { + set = mq.getColumnOrigins(rel.getRight(), iOutputColumn - nLeftColumns); } - /** - * Get the fields from the left table, don't go to getColumnOrigins(TableFunctionScan - * rel,RelMetadataQuery mq, int iOutputColumn), otherwise the return is null, and the - * UDTF field origin cannot be parsed - */ - set = mq.getColumnOrigins(rel.getLeft(), leftFieldIndex); } return set; } public Set getColumnOrigins(SetOp rel, RelMetadataQuery mq, int iOutputColumn) { - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); for (RelNode input : rel.getInputs()) { Set inputSet = mq.getColumnOrigins(input, iOutputColumn); if (inputSet == null) { - return null; + return Collections.emptySet(); } set.addAll(inputSet); } return set; } - /** Support the field blood relationship of lookup join */ + /** + * Support the field blood relationship of lookup join + */ public Set getColumnOrigins(Snapshot rel, RelMetadataQuery mq, int iOutputColumn) { return mq.getColumnOrigins(rel.getInput(), iOutputColumn); } - /** Support the field blood relationship of watermark */ + /** + * Support the field blood relationship of watermark + */ public Set getColumnOrigins(SingleRel rel, RelMetadataQuery mq, int iOutputColumn) { return mq.getColumnOrigins(rel.getInput(), iOutputColumn); } /** - * Support field blood relationship of CEP. The first column is the field after PARTITION BY, - * and the other columns come from the measures in Match + * Support for new fields in the source table similar to those created with the LOCALTIMESTAMP function + */ + public Set getColumnOrigins(Project rel, final RelMetadataQuery mq, int iOutputColumn) { + final RelNode input = rel.getInput(); + RexNode rexNode = rel.getProjects().get(iOutputColumn); + + if (rexNode instanceof RexInputRef) { + // Direct reference: no derivation added. + RexInputRef inputRef = (RexInputRef) rexNode; + int index = inputRef.getIndex(); + if (input instanceof TableScan) { + index = computeIndexWithOffset(rel.getProjects(), inputRef.getIndex(), iOutputColumn); + } + return mq.getColumnOrigins(input, index); + } else if (input instanceof TableScan + && rexNode.getClass().equals(RexCall.class) + && ((RexCall) rexNode).getOperands().isEmpty()) { + return mq.getColumnOrigins(input, iOutputColumn); + } + // Anything else is a derivation, possibly from multiple columns. + final Set set = getMultipleColumns(rexNode, input, mq); + return createDerivedColumnOrigins(set, rexNode.toString(), true); + } + + private int computeIndexWithOffset(List projects, int baseIndex, int iOutputColumn) { + int offset = 0; + for (int index = 0; index < iOutputColumn; index++) { + RexNode rexNode = projects.get(index); + if ((rexNode.getClass().equals(RexCall.class) + && ((RexCall) rexNode).getOperands().isEmpty())) { + offset += 1; + } + } + return baseIndex + offset; + } + + /** + * Support field blood relationship of CEP. 
+ * The first column is the field after PARTITION BY, and the other columns come from the measures in Match */ public Set getColumnOrigins(Match rel, RelMetadataQuery mq, int iOutputColumn) { - if (iOutputColumn == 0) { + int orderCount = rel.getOrderKeys().getKeys().size(); + + if (iOutputColumn < orderCount) { return mq.getColumnOrigins(rel.getInput(), iOutputColumn); } final RelNode input = rel.getInput(); - RexNode rexNode = rel.getMeasures().values().asList().get(iOutputColumn - 1); + RexNode rexNode = rel.getMeasures().values().asList().get(iOutputColumn - orderCount); RexPatternFieldRef rexPatternFieldRef = searchRexPatternFieldRef(rexNode); if (rexPatternFieldRef != null) { - return mq.getColumnOrigins(input, rexPatternFieldRef.getIndex()); + final Set set = mq.getColumnOrigins(input, rexPatternFieldRef.getIndex()); + String originTransform = rexNode instanceof RexCall + ? ((RexCall) rexNode).getOperands().get(0).toString() + : null; + return createDerivedColumnOrigins(set, originTransform, true); } - return null; + return Collections.emptySet(); } private RexPatternFieldRef searchRexPatternFieldRef(RexNode rexNode) { @@ -219,46 +293,6 @@ private RexPatternFieldRef searchRexPatternFieldRef(RexNode rexNode) { return null; } - /** Support the field blood relationship of ROW_NUMBER() */ - public Set getColumnOrigins(Window rel, RelMetadataQuery mq, int iOutputColumn) { - final RelNode input = rel.getInput(); - /** - * Haven't found a good way to judge whether the field comes from window, for the time - * being, first judge by parsing the string - */ - String fieldName = rel.getRowType().getFieldNames().get(iOutputColumn); - // for example: "w1$o0" - if (fieldName.startsWith("w") && fieldName.contains("$")) { - int groupIndex = Integer.parseInt(fieldName.substring(1, fieldName.indexOf("$"))); - final Set set = new LinkedHashSet<>(); - if (!rel.groups.isEmpty()) { - Window.Group group = rel.groups.get(groupIndex); - // process partition by keys - group.keys.asList().forEach(index -> set.addAll(mq.getColumnOrigins(input, index))); - // process order by keys - group.orderKeys - .getFieldCollations() - .forEach(e -> set.addAll(mq.getColumnOrigins(input, e.getFieldIndex()))); - } - return set; - } - return mq.getColumnOrigins(rel.getInput(), iOutputColumn); - } - - public Set getColumnOrigins(Project rel, final RelMetadataQuery mq, int iOutputColumn) { - final RelNode input = rel.getInput(); - RexNode rexNode = rel.getProjects().get(iOutputColumn); - - if (rexNode instanceof RexInputRef) { - // Direct reference: no derivation added. - RexInputRef inputRef = (RexInputRef) rexNode; - return mq.getColumnOrigins(input, inputRef.getIndex()); - } - // Anything else is a derivation, possibly from multiple columns. - final Set set = getMultipleColumns(rexNode, input, mq); - return createDerivedColumnOrigins(set); - } - public Set getColumnOrigins(Calc rel, final RelMetadataQuery mq, int iOutputColumn) { final RelNode input = rel.getInput(); final RexShuttle rexShuttle = new RexShuttle() { @@ -277,30 +311,6 @@ public RexNode visitLocalRef(RexLocalRef localRef) { // Direct reference: no derivation added. 
RexInputRef inputRef = (RexInputRef) rexNode; return mq.getColumnOrigins(input, inputRef.getIndex()); - } else if (rexNode instanceof RexCall && ((RexCall) rexNode).operands.isEmpty()) { - // support for new fields in the source table similar to those created with the - // LOCALTIMESTAMP function - TableSourceTable table = ((TableSourceTable) rel.getInput().getTable()); - if (table != null) { - String targetFieldName = rel.getProgram() - .getOutputRowType() - .getFieldList() - .get(iOutputColumn) - .getName(); - List fieldList = - table.contextResolvedTable().getResolvedSchema().getColumnNames(); - - int index = -1; - for (int i = 0; i < fieldList.size(); i++) { - if (fieldList.get(i).equalsIgnoreCase(targetFieldName)) { - index = i; - break; - } - } - if (index != -1) { - return Collections.singleton(new RelColumnOrigin(table, index, false)); - } - } } // Anything else is a derivation, possibly from multiple columns. final Set set = getMultipleColumns(rexNode, input, mq); @@ -324,14 +334,14 @@ public Set getColumnOrigins(Exchange rel, RelMetadataQuery mq, } public Set getColumnOrigins(TableFunctionScan rel, RelMetadataQuery mq, int iOutputColumn) { - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); Set mappings = rel.getColumnMappings(); if (mappings == null) { - if (rel.getInputs().size() > 0) { + if (!rel.getInputs().isEmpty()) { // This is a non-leaf transformation: say we don't // know about origins, because there are probably // columns below. - return null; + return Collections.emptySet(); } else { // This is a leaf transformation: say there are fer sure no // column origins. @@ -346,7 +356,7 @@ public Set getColumnOrigins(TableFunctionScan rel, RelMetadataQ final int column = mapping.iInputColumn; Set origins = mq.getColumnOrigins(input, column); if (origins == null) { - return null; + return Collections.emptySet(); } if (mapping.derived) { origins = createDerivedColumnOrigins(origins); @@ -357,18 +367,19 @@ public Set getColumnOrigins(TableFunctionScan rel, RelMetadataQ } // Catch-all rule when none of the others apply. + @SuppressWarnings("squid:S1172") public Set getColumnOrigins(RelNode rel, RelMetadataQuery mq, int iOutputColumn) { // NOTE jvs 28-Mar-2006: We may get this wrong for a physical table // expression which supports projections. In that case, // it's up to the plugin writer to override with the // correct information. - if (rel.getInputs().size() > 0) { + if (!rel.getInputs().isEmpty()) { // No generic logic available for non-leaf rels. - return null; + return Collections.emptySet(); } - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); RelOptTable table = rel.getTable(); if (table == null) { @@ -383,7 +394,7 @@ public Set getColumnOrigins(RelNode rel, RelMetadataQuery mq, i // names.) This detection assumes the table expression doesn't handle // rename as well. 
if (table.getRowType() != rel.getRowType()) { - return null; + return Collections.emptySet(); } set.add(new RelColumnOrigin(table, iOutputColumn, false)); @@ -392,9 +403,9 @@ public Set getColumnOrigins(RelNode rel, RelMetadataQuery mq, i private Set createDerivedColumnOrigins(Set inputSet) { if (inputSet == null) { - return null; + return Collections.emptySet(); } - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); for (RelColumnOrigin rco : inputSet) { RelColumnOrigin derived = new RelColumnOrigin(rco.getOriginTable(), rco.getOriginColumnOrdinal(), true); set.add(derived); @@ -402,10 +413,113 @@ private Set createDerivedColumnOrigins(Set inp return set; } + private Set createDerivedColumnOrigins( + Set inputSet, String transform, boolean originTransform) { + if (inputSet == null || inputSet.isEmpty()) { + return Collections.emptySet(); + } + final Set set = new LinkedHashSet<>(); + + String finalTransform = originTransform ? computeTransform(inputSet, transform) : transform; + for (RelColumnOrigin rco : inputSet) { + RelColumnOrigin derived = + new RelColumnOrigin(rco.getOriginTable(), rco.getOriginColumnOrdinal(), true, finalTransform); + set.add(derived); + } + return set; + } + + /** + * Replace the variable at the beginning of $ in input with the real field information + */ + private String computeTransform(Set inputSet, String transform) { + LOG.debug("origin transform: {}", transform); + Pattern pattern = Pattern.compile("\\$\\d+"); + Matcher matcher = pattern.matcher(transform); + + Set operandSet = new LinkedHashSet<>(); + while (matcher.find()) { + operandSet.add(matcher.group()); + } + + if (operandSet.isEmpty()) { + LOG.info("operandSet is empty"); + return null; + } + if (inputSet.size() != operandSet.size()) { + LOG.warn( + "The number [{}] of fields in the source tables are not equal to operands [{}]", + inputSet.size(), + operandSet.size()); + return null; + } + + Map sourceColumnMap = new HashMap<>(); + Iterator iterator = optimizeSourceColumnSet(inputSet).iterator(); + operandSet.forEach(e -> sourceColumnMap.put(e, iterator.next())); + LOG.debug("sourceColumnMap: {}", sourceColumnMap); + + matcher = pattern.matcher(transform); + String temp; + while (matcher.find()) { + temp = matcher.group(); + transform = transform.replace(temp, sourceColumnMap.get(temp)); + } + + // temporary special treatment + transform = transform.replace("_UTF-16LE", ""); + LOG.debug("transform: {}", transform); + return transform; + } + + /** + * Increase the readability of transform. + * if catalog, database and table are the same, return field. + * If the catalog and database are the same, return the table and field. + * If the catalog is the same, return the database, table, field. + * Otherwise, return all + */ + private Set optimizeSourceColumnSet(Set inputSet) { + Set catalogSet = new HashSet<>(); + Set databaseSet = new HashSet<>(); + Set tableSet = new HashSet<>(); + Set> qualifiedSet = new LinkedHashSet<>(); + for (RelColumnOrigin rco : inputSet) { + RelOptTable originTable = rco.getOriginTable(); + List qualifiedName = originTable.getQualifiedName(); + + // catalog,database,table,field + List qualifiedList = new ArrayList<>(qualifiedName); + catalogSet.add(qualifiedName.get(0)); + databaseSet.add(qualifiedName.get(1)); + tableSet.add(qualifiedName.get(2)); + + String field = rco.getTransform() != null + ? 
rco.getTransform() + : originTable.getRowType().getFieldNames().get(rco.getOriginColumnOrdinal()); + qualifiedList.add(field); + qualifiedSet.add(qualifiedList); + } + if (catalogSet.size() == 1 && databaseSet.size() == 1 && tableSet.size() == 1) { + return optimizeName(qualifiedSet, e -> e.get(3)); + } else if (catalogSet.size() == 1 && databaseSet.size() == 1) { + return optimizeName(qualifiedSet, e -> String.join(DELIMITER, e.subList(2, 4))); + } else if (catalogSet.size() == 1) { + return optimizeName(qualifiedSet, e -> String.join(DELIMITER, e.subList(1, 4))); + } else { + return optimizeName(qualifiedSet, e -> String.join(DELIMITER, e)); + } + } + + private Set optimizeName(Set> qualifiedSet, Function, String> mapper) { + return qualifiedSet.stream().map(mapper).collect(Collectors.toSet()); + } + private Set getMultipleColumns(RexNode rexNode, RelNode input, final RelMetadataQuery mq) { - final Set set = new HashSet<>(); + final Set set = new LinkedHashSet<>(); final RexVisitor visitor = new RexVisitorImpl(true) { + @Override public Void visitInputRef(RexInputRef inputRef) { Set inputSet = mq.getColumnOrigins(input, inputRef.getIndex()); if (inputSet != null) { diff --git a/dinky-client/dinky-client-1.18/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java b/dinky-client/dinky-client-1.18/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java index 433de1d248..6cc8e992ba 100644 --- a/dinky-client/dinky-client-1.18/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java +++ b/dinky-client/dinky-client-1.18/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java @@ -23,7 +23,6 @@ import org.dinky.context.DinkyClassLoaderContextHolder; import org.dinky.data.model.LineageRel; import org.dinky.data.result.SqlExplainResult; -import org.dinky.utils.FlinkStreamProgramWithoutPhysical; import org.dinky.utils.LineageContext; import org.apache.flink.api.dag.Transformation; @@ -51,7 +50,6 @@ import org.apache.flink.table.operations.command.ResetOperation; import org.apache.flink.table.operations.command.SetOperation; import org.apache.flink.table.operations.ddl.CreateTableOperation; -import org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram; import org.apache.flink.types.Row; import java.util.ArrayList; @@ -79,13 +77,10 @@ public class CustomTableEnvironmentImpl extends AbstractCustomTableEnvironment { private static final Logger log = LoggerFactory.getLogger(CustomTableEnvironmentImpl.class); - private final FlinkChainedProgram flinkChainedProgram; private static final ObjectMapper mapper = new ObjectMapper(); public CustomTableEnvironmentImpl(StreamTableEnvironment streamTableEnvironment) { super(streamTableEnvironment); - this.flinkChainedProgram = FlinkStreamProgramWithoutPhysical.buildProgram( - (Configuration) getStreamExecutionEnvironment().getConfiguration()); } public static CustomTableEnvironmentImpl create(StreamExecutionEnvironment executionEnvironment) { @@ -256,8 +251,7 @@ private void setConfiguration(StreamExecutionEnvironment environment, Map getLineage(String statement) { - LineageContext lineageContext = - new LineageContext(flinkChainedProgram, (TableEnvironmentImpl) streamTableEnvironment); + LineageContext lineageContext = new LineageContext((TableEnvironmentImpl) streamTableEnvironment); return lineageContext.getLineage(statement); } diff --git a/dinky-client/dinky-client-1.18/src/main/java/org/dinky/utils/LineageContext.java b/dinky-client/dinky-client-1.18/src/main/java/org/dinky/utils/LineageContext.java index 
2c31236b97..d707ade42a 100644 --- a/dinky-client/dinky-client-1.18/src/main/java/org/dinky/utils/LineageContext.java +++ b/dinky-client/dinky-client-1.18/src/main/java/org/dinky/utils/LineageContext.java @@ -27,23 +27,13 @@ import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.commons.collections.CollectionUtils; import org.apache.flink.api.java.tuple.Tuple2; -import org.apache.flink.table.api.TableConfig; import org.apache.flink.table.api.TableException; import org.apache.flink.table.api.ValidationException; import org.apache.flink.table.api.internal.TableEnvironmentImpl; -import org.apache.flink.table.catalog.CatalogManager; -import org.apache.flink.table.catalog.FunctionCatalog; -import org.apache.flink.table.module.ModuleManager; import org.apache.flink.table.operations.Operation; import org.apache.flink.table.operations.SinkModifyOperation; -import org.apache.flink.table.planner.calcite.FlinkRelBuilder; -import org.apache.flink.table.planner.calcite.RexFactory; -import org.apache.flink.table.planner.delegation.PlannerBase; import org.apache.flink.table.planner.operations.PlannerQueryOperation; -import org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram; -import org.apache.flink.table.planner.plan.optimize.program.StreamOptimizeContext; import org.apache.flink.table.planner.plan.schema.TableSourceTable; -import org.apache.flink.table.planner.plan.trait.MiniBatchInterval; import java.util.ArrayList; import java.util.List; @@ -56,11 +46,9 @@ */ public class LineageContext { - private final FlinkChainedProgram flinkChainedProgram; private final TableEnvironmentImpl tableEnv; - public LineageContext(FlinkChainedProgram flinkChainedProgram, TableEnvironmentImpl tableEnv) { - this.flinkChainedProgram = flinkChainedProgram; + public LineageContext(TableEnvironmentImpl tableEnv) { this.tableEnv = tableEnv; } @@ -70,11 +58,8 @@ public List getLineage(String statement) { String sinkTable = parsed.getField(0); RelNode oriRelNode = parsed.getField(1); - // 2. Optimize original relNode to generate Optimized Logical Plan - RelNode optRelNode = optimize(oriRelNode); - - // 3. Build lineage based from RelMetadataQuery - return buildFiledLineageResult(sinkTable, optRelNode); + // 2. Build lineage based from RelMetadataQuery + return buildFiledLineageResult(sinkTable, oriRelNode); } private Tuple2 parseStatement(String sql) { @@ -96,71 +81,6 @@ private Tuple2 parseStatement(String sql) { } } - /** Calling each program's optimize method in sequence. 
*/ - private RelNode optimize(RelNode relNode) { - return flinkChainedProgram.optimize(relNode, new StreamOptimizeContext() { - - @Override - public boolean isBatchMode() { - return false; - } - - @Override - public TableConfig getTableConfig() { - return tableEnv.getConfig(); - } - - @Override - public FunctionCatalog getFunctionCatalog() { - return getPlanner().getFlinkContext().getFunctionCatalog(); - } - - @Override - public CatalogManager getCatalogManager() { - return tableEnv.getCatalogManager(); - } - - @Override - public ModuleManager getModuleManager() { - return getPlanner().getFlinkContext().getModuleManager(); - } - - @Override - public RexFactory getRexFactory() { - return getPlanner().getFlinkContext().getRexFactory(); - } - - @Override - public FlinkRelBuilder getFlinkRelBuilder() { - return getPlanner().createRelBuilder(); - } - - @Override - public boolean isUpdateBeforeRequired() { - return false; - } - - @Override - public MiniBatchInterval getMiniBatchInterval() { - return MiniBatchInterval.NONE; - } - - @Override - public boolean needFinalTimeIndicatorConversion() { - return true; - } - - @Override - public ClassLoader getClassLoader() { - return getPlanner().getFlinkContext().getClassLoader(); - } - - private PlannerBase getPlanner() { - return (PlannerBase) tableEnv.getPlanner(); - } - }); - } - /** Check the size of query and sink fields match */ private void validateSchema(String sinkTable, RelNode relNode, List sinkFieldList) { List queryFieldList = relNode.getRowType().getFieldNames(); @@ -204,7 +124,8 @@ private List buildFiledLineageResult(String sinkTable, RelNode optRe String sourceColumn = fieldNames.get(ordinal); // add record - resultList.add(LineageRel.build(sourceTable, sourceColumn, sinkTable, targetColumn)); + resultList.add(LineageRel.build( + sourceTable, sourceColumn, sinkTable, targetColumn, relColumnOrigin.getTransform())); } } } diff --git a/dinky-client/dinky-client-1.18/src/test/java/org/dinky/utils/LineageContextTest.java b/dinky-client/dinky-client-1.18/src/test/java/org/dinky/utils/LineageContextTest.java new file mode 100644 index 0000000000..fa22d6eec6 --- /dev/null +++ b/dinky-client/dinky-client-1.18/src/test/java/org/dinky/utils/LineageContextTest.java @@ -0,0 +1,112 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.dinky.utils; + +import static org.junit.Assert.assertEquals; + +import org.dinky.data.model.LineageRel; + +import org.apache.flink.configuration.Configuration; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.table.api.EnvironmentSettings; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; +import org.apache.flink.table.api.internal.TableEnvironmentImpl; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +/** + * @description: LineageContextTest + * @author: HamaWhite + */ +public class LineageContextTest { + + private static TableEnvironmentImpl tableEnv; + private static LineageContext context; + + @BeforeClass + public static void setUp() { + StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment(new Configuration()); + + EnvironmentSettings settings = + EnvironmentSettings.newInstance().inStreamingMode().build(); + tableEnv = (TableEnvironmentImpl) StreamTableEnvironment.create(env, settings); + + context = new LineageContext(tableEnv); + } + + @Before + public void init() { + // create table ST + tableEnv.executeSql("DROP TABLE IF EXISTS ST"); + tableEnv.executeSql("CREATE TABLE ST ( " + " a STRING ," + + " b STRING ," + + " c STRING " + + ") WITH ( " + + " 'connector' = 'datagen' ," + + " 'rows-per-second' = '1' " + + ")"); + + // create table TT + tableEnv.executeSql("DROP TABLE IF EXISTS TT"); + tableEnv.executeSql("CREATE TABLE TT ( " + " A STRING ," + + " B STRING " + + ") WITH ( " + + " 'connector' = 'print' " + + ")"); + } + + @Test + public void testGetLineage() { + List actualList = context.getLineage("INSERT INTO TT select a||c A ,b||c B from ST"); + String[][] expectedArray = { + {"ST", "a", "TT", "A", "||(a, c)"}, + {"ST", "c", "TT", "A", "||(a, c)"}, + {"ST", "b", "TT", "B", "||(b, c)"}, + {"ST", "c", "TT", "B", "||(b, c)"} + }; + + List expectedList = buildResult(expectedArray); + assertEquals(expectedList, actualList); + } + + private List buildResult(String[][] expectedArray) { + return Stream.of(expectedArray) + .map(e -> { + String transform = e.length == 5 ? 
e[4] : null; + return new LineageRel( + "default_catalog", + "default_database", + e[0], + e[1], + "default_catalog", + "default_database", + e[2], + e[3], + transform); + }) + .collect(Collectors.toList()); + } +} diff --git a/dinky-client/dinky-client-base/src/main/java/org/dinky/data/model/LineageRel.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/data/model/LineageRel.java index d15c1df452..befb85f874 100644 --- a/dinky-client/dinky-client-base/src/main/java/org/dinky/data/model/LineageRel.java +++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/data/model/LineageRel.java @@ -19,6 +19,8 @@ package org.dinky.data.model; +import java.util.Objects; + /** * LineageResult * @@ -42,6 +44,12 @@ public class LineageRel { private final String targetColumn; + /** + * Stores the expression for data conversion, + * which source table fields are transformed by which expression the target field + */ + private String transform; + private static final String DELIMITER = "."; public LineageRel( @@ -52,7 +60,8 @@ public LineageRel( String targetCatalog, String targetDatabase, String targetTable, - String targetColumn) { + String targetColumn, + String transform) { this.sourceCatalog = sourceCatalog; this.sourceDatabase = sourceDatabase; this.sourceTable = sourceTable; @@ -61,10 +70,15 @@ public LineageRel( this.targetDatabase = targetDatabase; this.targetTable = targetTable; this.targetColumn = targetColumn; + this.transform = transform; } public static LineageRel build( - String sourceTablePath, String sourceColumn, String targetTablePath, String targetColumn) { + String sourceTablePath, + String sourceColumn, + String targetTablePath, + String targetColumn, + String transform) { String[] sourceItems = sourceTablePath.split("\\."); String[] targetItems = targetTablePath.split("\\."); @@ -76,7 +90,8 @@ public static LineageRel build( targetItems[0], targetItems[1], targetItems[2], - targetColumn); + targetColumn, + transform); } public static LineageRel build( @@ -87,7 +102,8 @@ public static LineageRel build( String targetCatalog, String targetDatabase, String targetTable, - String targetColumn) { + String targetColumn, + String transform) { return new LineageRel( sourceCatalog, sourceDatabase, @@ -96,7 +112,8 @@ public static LineageRel build( targetCatalog, targetDatabase, targetTable, - targetColumn); + targetColumn, + transform); } public String getSourceCatalog() { @@ -138,4 +155,40 @@ public String getSourceTablePath() { public String getTargetTablePath() { return targetCatalog + DELIMITER + targetDatabase + DELIMITER + targetTable; } + + public String getTransform() { + return transform; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + LineageRel that = (LineageRel) o; + + if (!sourceCatalog.equals(that.sourceCatalog)) return false; + if (!sourceDatabase.equals(that.sourceDatabase)) return false; + if (!sourceTable.equals(that.sourceTable)) return false; + if (!sourceColumn.equals(that.sourceColumn)) return false; + if (!targetCatalog.equals(that.targetCatalog)) return false; + if (!targetDatabase.equals(that.targetDatabase)) return false; + if (!targetTable.equals(that.targetTable)) return false; + if (!targetColumn.equals(that.targetColumn)) return false; + return Objects.equals(transform, that.transform); + } + + @Override + public int hashCode() { + int result = sourceCatalog.hashCode(); + result = 31 * result + sourceDatabase.hashCode(); + result = 31 * result + 
sourceTable.hashCode(); + result = 31 * result + sourceColumn.hashCode(); + result = 31 * result + targetCatalog.hashCode(); + result = 31 * result + targetDatabase.hashCode(); + result = 31 * result + targetTable.hashCode(); + result = 31 * result + targetColumn.hashCode(); + result = 31 * result + (transform != null ? transform.hashCode() : 0); + return result; + } } diff --git a/pom.xml b/pom.xml index b81a889b6e..48cfa2a939 100644 --- a/pom.xml +++ b/pom.xml @@ -959,7 +959,7 @@ flink-1.15 1.15 - 1.15.2 + 1.15.4 @@ -968,7 +968,7 @@ flink-1.16 1.16 - 1.16.0 + 1.16.2 From 1cdfd5f977e1e71ca65f805dd59ff4183e0d965d Mon Sep 17 00:00:00 2001 From: gaoyan Date: Fri, 3 Nov 2023 16:28:06 +0800 Subject: [PATCH 02/21] Bug fixs (#2487) * Optimize the process * fix cluster json bug * fix cluster cancel bug * add check on online * fix offline bug * fix job life bug * limitTheParametersPassedToATask * formate code * formate code * formate code * remove jar config * Optimized the automatic process build format * Added code comparison filter * fix dinky-app env bug * fix dinky-app env bug * formate coed * fix some code * fix k8s client --- .../java/org/dinky/aop/ProcessAspect.java | 6 +- .../dinky/context/ConsoleContextHolder.java | 2 +- .../org/dinky/controller/TaskController.java | 21 +-- .../main/java/org/dinky/data/dto/TaskDTO.java | 6 - .../java/org/dinky/data/dto/TaskSaveDTO.java | 151 ++++++++++++++++++ .../mapping/ClusterConfigurationMapping.java | 3 +- .../dinky/job/handler/JobRefreshHandler.java | 5 +- .../java/org/dinky/service/TaskService.java | 3 +- .../dinky/service/impl/TaskServiceImpl.java | 59 +++---- .../org/dinky/app/flinksql/Submitter.java | 2 +- .../org/dinky/data/enums/JobLifeCycle.java | 14 +- .../gateway/kubernetes/KubernetesGateway.java | 10 +- .../DataStudio/HeaderContainer/index.tsx | 16 +- .../DataStudio/HeaderContainer/service.tsx | 8 +- .../MiddleContainer/Editor/constants.tsx | 34 ++-- dinky-web/src/pages/DataStudio/model.ts | 1 - .../1.0.0-SNAPSHOT_schema/mysql/dinky_dml.sql | 3 +- 17 files changed, 255 insertions(+), 89 deletions(-) create mode 100644 dinky-admin/src/main/java/org/dinky/data/dto/TaskSaveDTO.java diff --git a/dinky-admin/src/main/java/org/dinky/aop/ProcessAspect.java b/dinky-admin/src/main/java/org/dinky/aop/ProcessAspect.java index 1b36bc5ced..677cabeb38 100644 --- a/dinky-admin/src/main/java/org/dinky/aop/ProcessAspect.java +++ b/dinky-admin/src/main/java/org/dinky/aop/ProcessAspect.java @@ -110,10 +110,8 @@ public Object processStepAround(ProceedingJoinPoint joinPoint, ProcessStep proce contextHolder.finishedStep(MDC.get(PROCESS_NAME), step, ProcessStatus.FAILED, e); throw e; } finally { - // If a parent step exists, it is restored after the execution is complete - if (parentStep != null) { - MDC.put(PROCESS_STEP, parentStep); - } + // restored after the execution is complete + MDC.put(PROCESS_STEP, parentStep); } return result; } diff --git a/dinky-admin/src/main/java/org/dinky/context/ConsoleContextHolder.java b/dinky-admin/src/main/java/org/dinky/context/ConsoleContextHolder.java index a64ae4b58c..f6295da2ce 100644 --- a/dinky-admin/src/main/java/org/dinky/context/ConsoleContextHolder.java +++ b/dinky-admin/src/main/java/org/dinky/context/ConsoleContextHolder.java @@ -195,7 +195,7 @@ public void finishedProcess(String processName, ProcessStatus status, Throwable if (e != null) { appendLog(processName, null, LogUtil.getError(e.getCause()), true); } - String filePath = String.format("%s/tmp/log/%s.json", System.getProperty("user.dir"), 
process.getTitle()); + String filePath = String.format("%s/tmp/log/%s.json", System.getProperty("user.dir"), processName); if (FileUtil.exist(filePath)) { Assert.isTrue(FileUtil.del(filePath)); } diff --git a/dinky-admin/src/main/java/org/dinky/controller/TaskController.java b/dinky-admin/src/main/java/org/dinky/controller/TaskController.java index 1457d39ab2..bc23c4fe79 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/TaskController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/TaskController.java @@ -25,11 +25,13 @@ import org.dinky.data.dto.DebugDTO; import org.dinky.data.dto.TaskDTO; import org.dinky.data.dto.TaskRollbackVersionDTO; +import org.dinky.data.dto.TaskSaveDTO; import org.dinky.data.enums.BusinessType; import org.dinky.data.enums.JobLifeCycle; import org.dinky.data.enums.ProcessType; import org.dinky.data.enums.Status; import org.dinky.data.exception.NotSupportExplainExcepition; +import org.dinky.data.exception.SqlExplainExcepition; import org.dinky.data.model.Task; import org.dinky.data.result.ProTableResult; import org.dinky.data.result.Result; @@ -134,11 +136,12 @@ public Result savepoint(@RequestParam Integer taskId, @RequestP Status.EXECUTE_SUCCESS); } - @GetMapping("/onLineTask") - @Log(title = "onLineTask", businessType = BusinessType.TRIGGER) - @ApiOperation("onLineTask") - public Result onLineTask(@RequestParam Integer taskId) { - if (taskService.changeTaskLifeRecyle(taskId, JobLifeCycle.ONLINE)) { + @GetMapping("/changeTaskLife") + @Log(title = "changeTaskLife", businessType = BusinessType.TRIGGER) + @ApiOperation("changeTaskLife") + public Result changeTaskLife(@RequestParam Integer taskId, @RequestParam Integer lifeCycle) + throws SqlExplainExcepition { + if (taskService.changeTaskLifeRecyle(taskId, JobLifeCycle.get(lifeCycle))) { return Result.succeed(Status.PUBLISH_SUCCESS); } else { return Result.failed(Status.PUBLISH_FAILED); @@ -165,11 +168,11 @@ public Result getJobPlan(@ProcessId @RequestBody TaskDTO taskDTO) { name = "task", value = "Task", required = true, - dataType = "Task", + dataType = "TaskSaveDTO", paramType = "body", - dataTypeClass = Task.class) - public Result saveOrUpdateTask(@RequestBody Task task) { - if (taskService.saveOrUpdateTask(task)) { + dataTypeClass = TaskSaveDTO.class) + public Result saveOrUpdateTask(@RequestBody TaskSaveDTO task) { + if (taskService.saveOrUpdateTask(task.toTaskEntity())) { return Result.succeed(Status.SAVE_SUCCESS); } else { return Result.failed(Status.SAVE_FAILED); diff --git a/dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java b/dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java index a346d66846..4e724cb71b 100644 --- a/dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java +++ b/dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java @@ -119,9 +119,6 @@ public class TaskDTO extends AbstractStatementDTO { notes = "The identifier of the database") private Integer databaseId; - @ApiModelProperty(value = "JAR ID", dataType = "Integer", example = "4", notes = "The identifier of the JAR") - private Integer jarId; - @ApiModelProperty( value = "Alert Group ID", dataType = "Integer", @@ -174,9 +171,6 @@ public class TaskDTO extends AbstractStatementDTO { @ApiModelProperty(value = "Path", dataType = "String", notes = "Path associated with the task") private String path; - @ApiModelProperty(value = "JAR Name", dataType = "String", notes = "Name of the associated JAR") - private String jarName; - @ApiModelProperty( value = "Cluster Configuration Name", dataType = "String", diff 
--git a/dinky-admin/src/main/java/org/dinky/data/dto/TaskSaveDTO.java b/dinky-admin/src/main/java/org/dinky/data/dto/TaskSaveDTO.java new file mode 100644 index 0000000000..83a3ba2b33 --- /dev/null +++ b/dinky-admin/src/main/java/org/dinky/data/dto/TaskSaveDTO.java @@ -0,0 +1,151 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.data.dto; + +import org.dinky.data.model.Task; +import org.dinky.data.model.TaskExtConfig; +import org.dinky.data.typehandler.JSONObjectHandler; +import org.dinky.mybatis.annotation.Save; + +import org.apache.ibatis.type.JdbcType; + +import javax.validation.constraints.NotNull; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableField; +import com.baomidou.mybatisplus.annotation.TableId; + +import cn.hutool.core.bean.BeanUtil; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +@Data +public class TaskSaveDTO { + + /** 主键ID */ + @TableId(value = "id", type = IdType.AUTO) + @ApiModelProperty(value = "ID", required = true, dataType = "Integer", example = "1", notes = "Primary Key") + private Integer id; + + @NotNull( + message = "Name cannot be null", + groups = {Save.class}) + @ApiModelProperty(value = "Name", required = true, dataType = "String", example = "Name") + private String name; + + @NotNull( + message = "Enabled cannot be null", + groups = {Save.class}) + @ApiModelProperty(value = "Enabled", required = true, dataType = "Boolean", example = "true") + private Boolean enabled; + + @ApiModelProperty(value = "Dialect", dataType = "String", notes = "Dialect for the task") + private String dialect; + + @ApiModelProperty(value = "Type", dataType = "String", notes = "Type of the task") + private String type; + + @ApiModelProperty(value = "Check Point", dataType = "Integer", example = "1", notes = "Check point for the task") + private Integer checkPoint; + + @ApiModelProperty(value = "Save point strategy", dataType = "SavePointStrategy", notes = "Save point strategy") + private Integer savePointStrategy; + + @ApiModelProperty(value = "Save Point Path", dataType = "String", notes = "Save point path for the task") + private String savePointPath; + + @ApiModelProperty(value = "Parallelism", dataType = "Integer", example = "4", notes = "Parallelism for the task") + private Integer parallelism; + + @ApiModelProperty( + value = "Fragment", + dataType = "Boolean", + example = "true", + notes = "Fragment option for the task") + private Boolean fragment; + + @ApiModelProperty( + value = "Statement Set", + dataType = "Boolean", + example = "false", + notes = "Statement set option for the task") + private Boolean statementSet; + + @ApiModelProperty( + value = "Batch Model", + dataType = "Boolean", + example = "true", + notes = 
"Batch model option for the task") + private Boolean batchModel; + + @ApiModelProperty( + value = "ClusterInstance ID", + dataType = "Integer", + example = "2001", + notes = "ID of the cluster associated with the task") + private Integer clusterId; + + @ApiModelProperty( + value = "Cluster Configuration ID", + dataType = "Integer", + example = "3001", + notes = "ID of the cluster configuration associated with the task") + private Integer clusterConfigurationId; + + @ApiModelProperty( + value = "Database ID", + dataType = "Integer", + example = "4001", + notes = "ID of the database associated with the task") + private Integer databaseId; + + @ApiModelProperty( + value = "Environment ID", + dataType = "Integer", + example = "6001", + notes = "ID of the environment associated with the task") + private Integer envId; + + @ApiModelProperty( + value = "Alert Group ID", + dataType = "Integer", + example = "7001", + notes = "ID of the alert group associated with the task") + private Integer alertGroupId; + + @ApiModelProperty( + value = "Configuration JSON", + dataType = "TaskExtConfig", + notes = "Extended configuration in JSON format for the task") + @TableField(typeHandler = JSONObjectHandler.class, jdbcType = JdbcType.VARCHAR) + private TaskExtConfig configJson; + + @ApiModelProperty(value = "Note", dataType = "String", notes = "Additional notes for the task") + private String note; + + @ApiModelProperty(value = "Statement", dataType = "String", notes = "SQL statement for the task") + private String statement; + + public Task toTaskEntity() { + Task task = new Task(); + BeanUtil.copyProperties(this, task); + return task; + } +} diff --git a/dinky-admin/src/main/java/org/dinky/data/model/mapping/ClusterConfigurationMapping.java b/dinky-admin/src/main/java/org/dinky/data/model/mapping/ClusterConfigurationMapping.java index cafc218a3d..b8a09c9e30 100644 --- a/dinky-admin/src/main/java/org/dinky/data/model/mapping/ClusterConfigurationMapping.java +++ b/dinky-admin/src/main/java/org/dinky/data/model/mapping/ClusterConfigurationMapping.java @@ -20,6 +20,7 @@ package org.dinky.data.model.mapping; import org.dinky.data.model.ClusterConfiguration; +import org.dinky.gateway.model.FlinkClusterConfig; import java.time.LocalDateTime; @@ -75,7 +76,7 @@ public class ClusterConfigurationMapping { dataType = "String", example = "test", notes = "cluster config json") - private String configJson; + private FlinkClusterConfig configJson; @ApiModelProperty( value = "isAvailable", diff --git a/dinky-admin/src/main/java/org/dinky/job/handler/JobRefreshHandler.java b/dinky-admin/src/main/java/org/dinky/job/handler/JobRefreshHandler.java index 3bfe185241..38051af6c5 100644 --- a/dinky-admin/src/main/java/org/dinky/job/handler/JobRefreshHandler.java +++ b/dinky-admin/src/main/java/org/dinky/job/handler/JobRefreshHandler.java @@ -55,7 +55,6 @@ import com.alibaba.fastjson2.JSON; import com.fasterxml.jackson.databind.JsonNode; -import cn.hutool.json.JSONObject; import cn.hutool.json.JSONUtil; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -249,8 +248,8 @@ private static void handleJobDone(JobInfoDetail jobInfoDetail) { if (GatewayType.isDeployCluster(clusterType)) { JobConfig jobConfig = new JobConfig(); - String configJson = jobDataDto.getClusterConfiguration().getConfigJson(); - jobConfig.buildGatewayConfig(new JSONObject(configJson).toBean(FlinkClusterConfig.class)); + FlinkClusterConfig configJson = jobDataDto.getClusterConfiguration().getConfigJson(); + 
jobConfig.buildGatewayConfig(configJson); jobConfig.getGatewayConfig().setType(GatewayType.get(clusterType)); jobConfig.getGatewayConfig().getFlinkConfig().setJobName(jobInstance.getName()); Gateway.build(jobConfig.getGatewayConfig()).onJobFinishCallback(jobInstance.getStatus()); diff --git a/dinky-admin/src/main/java/org/dinky/service/TaskService.java b/dinky-admin/src/main/java/org/dinky/service/TaskService.java index 7701d59004..d4aaab016b 100644 --- a/dinky-admin/src/main/java/org/dinky/service/TaskService.java +++ b/dinky-admin/src/main/java/org/dinky/service/TaskService.java @@ -26,6 +26,7 @@ import org.dinky.data.enums.JobLifeCycle; import org.dinky.data.exception.ExcuteException; import org.dinky.data.exception.NotSupportExplainExcepition; +import org.dinky.data.exception.SqlExplainExcepition; import org.dinky.data.model.JobModelOverview; import org.dinky.data.model.JobTypeOverView; import org.dinky.data.model.Task; @@ -162,7 +163,7 @@ public interface TaskService extends ISuperService { * @param lifeCycle The new life cycle of the task. * @return true if the life cycle is successfully changed, false otherwise. */ - boolean changeTaskLifeRecyle(Integer taskId, JobLifeCycle lifeCycle); + boolean changeTaskLifeRecyle(Integer taskId, JobLifeCycle lifeCycle) throws SqlExplainExcepition; /** * Save or update the given task. diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java index b85e129aee..26b93a2537 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java @@ -167,7 +167,7 @@ private String[] buildParams(int id) { } @ProcessStep(type = ProcessStepType.SUBMIT_PRECHECK) - public void preCheckTask(TaskDTO task) throws TaskNotDoneException, SqlExplainExcepition { + public void preCheckTask(TaskDTO task) throws TaskNotDoneException { log.info("Start check and config task, task:{}", task.getName()); Assert.notNull(task, Status.TASK_NOT_EXIST.getMessage()); @@ -180,21 +180,6 @@ public void preCheckTask(TaskDTO task) throws TaskNotDoneException, SqlExplainEx throw new BusException(Status.TASK_STATUS_IS_NOT_DONE.getMessage()); } } - - log.info("Start explain Sql,task: {},Dialect:{}", task.getName(), task.getDialect()); - - List sqlExplainResults = explainTask(task); - for (SqlExplainResult sqlExplainResult : sqlExplainResults) { - if (!sqlExplainResult.isParseTrue() || !sqlExplainResult.isExplainTrue()) { - throw new SqlExplainExcepition(StrFormatter.format( - "task [{}] sql explain failed, sql [{}], error: [{}]", - task.getName(), - sqlExplainResult.getSql(), - sqlExplainResult.getError())); - } - } - - log.info("Explain Sql finish"); } @ProcessStep(type = ProcessStepType.SUBMIT_EXECUTE) @@ -221,11 +206,17 @@ public JobConfig buildJobConfig(TaskDTO task) { flinkClusterCfg.getAppConfig().setUserJarParas(buildParams(config.getTaskId())); flinkClusterCfg.getAppConfig().setUserJarMainAppClass(CommonConstant.DINKY_APP_MAIN_CLASS); config.buildGatewayConfig(flinkClusterCfg); + Optional.ofNullable(task.getJobInstanceId()).ifPresent(i -> { + JobInstance jobInstance = jobInstanceService.getById(i); + config.setClusterId(jobInstance.getClusterId()); + }); } else { - log.info("Init remote cluster"); - String address = clusterInstanceService.buildEnvironmentAddress(config.isUseRemote(), task.getClusterId()); - config.setAddress(address); + 
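+            // Non-deploy branch: fall back to the cluster id configured on the task, if any.
+            // The JobManager address is then resolved below from whichever cluster id ended up
+            // on the config (job instance's cluster for deploy mode, task's cluster otherwise).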
Optional.ofNullable(task.getClusterId()).ifPresent(config::setClusterId); } + log.info("Init remote cluster"); + Optional.ofNullable(config.getClusterId()).ifPresent(i -> { + config.setAddress(clusterInstanceService.buildEnvironmentAddress(config.isUseRemote(), i)); + }); return config; } @@ -263,6 +254,10 @@ public JobResult submitTask(Integer id, String savePointPath) throws Exception { } // 注解自调用会失效,这里通过获取对象方法绕过此限制 TaskServiceImpl taskServiceBean = applicationContext.getBean(TaskServiceImpl.class); + taskServiceBean.preCheckTask(taskDTO); + // The job instance does not exist by default, + // so that it does not affect other operations, such as checking the jobmanager address + taskDTO.setJobInstanceId(null); JobResult jobResult = taskServiceBean.executeJob(taskDTO); log.info("Job Submit success"); Task task = new Task(id, jobResult.getJobInstanceId()); @@ -427,19 +422,29 @@ public void initTenantByTaskId(Integer id) { } @Override - public boolean changeTaskLifeRecyle(Integer taskId, JobLifeCycle lifeCycle) { - TaskDTO taskInfoById = getTaskInfoById(taskId); - taskInfoById.setStep(lifeCycle.getValue()); - if (lifeCycle == JobLifeCycle.ONLINE) { - taskVersionService.createTaskVersionSnapshot(taskInfoById); - } - return saveOrUpdate(taskInfoById.buildTask()); + public boolean changeTaskLifeRecyle(Integer taskId, JobLifeCycle lifeCycle) throws SqlExplainExcepition { + TaskDTO task = getTaskInfoById(taskId); + task.setStep(lifeCycle.getValue()); + if (lifeCycle == JobLifeCycle.PUBLISH) { + // List sqlExplainResults = explainTask(task); + // for (SqlExplainResult sqlExplainResult : sqlExplainResults) { + // if (!sqlExplainResult.isParseTrue() || !sqlExplainResult.isExplainTrue()) { + // throw new SqlExplainExcepition(StrFormatter.format( + // "task [{}] sql explain failed, sql [{}], error: [{}]", + // task.getName(), + // sqlExplainResult.getSql(), + // sqlExplainResult.getError())); + // } + // } + taskVersionService.createTaskVersionSnapshot(task); + } + return saveOrUpdate(task.buildTask()); } @Override public boolean saveOrUpdateTask(Task task) { - if (JobLifeCycle.ONLINE.equalsValue(task.getStep())) { + if (JobLifeCycle.PUBLISH.equalsValue(task.getStep())) { throw new BusException(Status.TASK_IS_ONLINE.getMessage()); } diff --git a/dinky-app/dinky-app-base/src/main/java/org/dinky/app/flinksql/Submitter.java b/dinky-app/dinky-app-base/src/main/java/org/dinky/app/flinksql/Submitter.java index 1a2c0b1864..63bb35abe9 100644 --- a/dinky-app/dinky-app-base/src/main/java/org/dinky/app/flinksql/Submitter.java +++ b/dinky-app/dinky-app-base/src/main/java/org/dinky/app/flinksql/Submitter.java @@ -103,7 +103,7 @@ public static void submit(AppParamConfig config) throws SQLException { public static String buildSql(AppTask appTask) throws SQLException { StringBuilder sb = new StringBuilder(); // build env task - if (Asserts.isNotNull(appTask.getEnvId())) { + if (Asserts.isNotNull(appTask.getEnvId()) && appTask.getEnvId() > 0) { AppTask envTask = DBUtil.getTask(appTask.getEnvId()); if (Asserts.isNotNullString(envTask.getStatement())) { log.info("use statement is enable, load env:{}", envTask.getName()); diff --git a/dinky-common/src/main/java/org/dinky/data/enums/JobLifeCycle.java b/dinky-common/src/main/java/org/dinky/data/enums/JobLifeCycle.java index 73cb91d520..048a796545 100644 --- a/dinky-common/src/main/java/org/dinky/data/enums/JobLifeCycle.java +++ b/dinky-common/src/main/java/org/dinky/data/enums/JobLifeCycle.java @@ -27,26 +27,20 @@ * @since 2022/2/1 16:37 */ public enum JobLifeCycle { - 
UNKNOWN(0, "未知"), - DEVELOP(1, "开发"), - ONLINE(2, "上线"); + UNKNOWN(0), + DEVELOP(1), + PUBLISH(2); private Integer value; - private String label; - JobLifeCycle(Integer value, String label) { + JobLifeCycle(Integer value) { this.value = value; - this.label = label; } public Integer getValue() { return value; } - public String getLabel() { - return label; - } - public static JobLifeCycle get(Integer value) { return Arrays.stream(values()) .filter(item -> item.getValue().equals(value)) diff --git a/dinky-gateway/src/main/java/org/dinky/gateway/kubernetes/KubernetesGateway.java b/dinky-gateway/src/main/java/org/dinky/gateway/kubernetes/KubernetesGateway.java index 7f37a48510..a5295fab58 100644 --- a/dinky-gateway/src/main/java/org/dinky/gateway/kubernetes/KubernetesGateway.java +++ b/dinky-gateway/src/main/java/org/dinky/gateway/kubernetes/KubernetesGateway.java @@ -39,8 +39,10 @@ import org.apache.flink.kubernetes.kubeclient.Fabric8FlinkKubeClient; import org.apache.flink.kubernetes.kubeclient.FlinkKubeClient; import org.apache.flink.kubernetes.kubeclient.FlinkKubeClientFactory; +import org.apache.http.util.TextUtils; import java.lang.reflect.Method; +import java.nio.charset.StandardCharsets; import java.util.Collections; import cn.hutool.core.io.FileUtil; @@ -111,7 +113,13 @@ private void preparPodTemplate(String podTemplate, ConfigOption option) private void initKubeClient() { client = FlinkKubeClientFactory.getInstance().fromConfiguration(configuration, "client"); - kubernetesClient = new DefaultKubernetesClient(); + String kubeFile = configuration.getString(KubernetesConfigOptions.KUBE_CONFIG_FILE); + if (TextUtils.isEmpty(kubeFile)) { + kubernetesClient = new DefaultKubernetesClient(); + } else { + String kubeStr = FileUtil.readString(kubeFile, StandardCharsets.UTF_8); + kubernetesClient = DefaultKubernetesClient.fromConfig(kubeStr); + } } public SavePointResult savepointCluster(String savePoint) { diff --git a/dinky-web/src/pages/DataStudio/HeaderContainer/index.tsx b/dinky-web/src/pages/DataStudio/HeaderContainer/index.tsx index 3ce66ca5db..0756abb6d9 100644 --- a/dinky-web/src/pages/DataStudio/HeaderContainer/index.tsx +++ b/dinky-web/src/pages/DataStudio/HeaderContainer/index.tsx @@ -30,10 +30,10 @@ import { } from '@/pages/DataStudio/HeaderContainer/function'; import { cancelTask, + changeTaskLife, debugTask, executeSql, - getJobPlan, - onLineTask + getJobPlan } from '@/pages/DataStudio/HeaderContainer/service'; import { StateType, TabsPageSubType, TabsPageType, VIEW } from '@/pages/DataStudio/model'; import { JOB_LIFE_CYCLE, JOB_STATUS } from '@/pages/DevOps/constants'; @@ -182,12 +182,20 @@ const HeaderContainer = (props: any) => { const handleChangeJobLife = async () => { if (!currentData) return; if (isOnline(currentData)) { - await cancelTask(l('global.table.lifecycle.offline'), currentData.id); + await changeTaskLife( + l('global.table.lifecycle.offline'), + currentData.id, + JOB_LIFE_CYCLE.DEVELOP + ); currentData.step = JOB_LIFE_CYCLE.DEVELOP; } else { const saved = await handleSave(); if (saved) { - await onLineTask(l('global.table.lifecycle.publishing'), currentData.id); + await changeTaskLife( + l('global.table.lifecycle.publishing'), + currentData.id, + JOB_LIFE_CYCLE.PUBLISH + ); currentData.step = JOB_LIFE_CYCLE.PUBLISH; } } diff --git a/dinky-web/src/pages/DataStudio/HeaderContainer/service.tsx b/dinky-web/src/pages/DataStudio/HeaderContainer/service.tsx index 9e33f994be..bee5d82c0b 100644 --- a/dinky-web/src/pages/DataStudio/HeaderContainer/service.tsx +++ 
b/dinky-web/src/pages/DataStudio/HeaderContainer/service.tsx @@ -41,12 +41,8 @@ export function cancelTask(title: string, id: number) { return handleGetOption('api/task/cancel', title, { id }); } -export function onLineTask(title = '', id: number) { - return handleGetOption('api/task/onLineTask', title, { taskId: id }); -} - -export function offLinelTask(id: number) { - return handleGetOption('api/task/cancel', '', { taskId: id }); +export function changeTaskLife(title = '', id: number, life: number) { + return handleGetOption('api/task/changeTaskLife', title, { taskId: id, lifeCycle: life }); } export const isSql = (dialect: string) => { diff --git a/dinky-web/src/pages/DataStudio/MiddleContainer/Editor/constants.tsx b/dinky-web/src/pages/DataStudio/MiddleContainer/Editor/constants.tsx index 5b0c43b4f4..3043d3a2ed 100644 --- a/dinky-web/src/pages/DataStudio/MiddleContainer/Editor/constants.tsx +++ b/dinky-web/src/pages/DataStudio/MiddleContainer/Editor/constants.tsx @@ -1,19 +1,19 @@ /* * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
* */ @@ -49,5 +49,13 @@ export const TASK_VAR_FILTER = [ 'useAutoCancel', 'status', 'step', - 'jobConfig' + 'jobConfig', + 'note', + 'step', + 'versionId', + 'clusterName', + 'clusterConfigurationName', + 'databaseName', + 'envName', + 'alertGroupName', ]; diff --git a/dinky-web/src/pages/DataStudio/model.ts b/dinky-web/src/pages/DataStudio/model.ts index 38b2f565a6..dc88f8e035 100644 --- a/dinky-web/src/pages/DataStudio/model.ts +++ b/dinky-web/src/pages/DataStudio/model.ts @@ -90,7 +90,6 @@ export type TaskType = { clusterConfigurationName?: string; databaseId?: number; databaseName?: string; - jarId?: number; envId?: number; jobInstanceId?: number; note?: string; diff --git a/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_dml.sql b/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_dml.sql index ee44ad9158..a1f76f286c 100644 --- a/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_dml.sql +++ b/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_dml.sql @@ -278,7 +278,8 @@ LEFT JOIN -- 删除dinky_job_history 的 jar_json 字段 alter table dinky_job_history drop column jar_json; alter table dinky_task drop column jar_id; - +UPDATE dinky_task_version SET task_configure=JSON_REMOVE(task_configure, '$.jarId'); +UPDATE dinky_history SET config_json=JSON_REMOVE(config_json, '$.jarId'); insert into `dinky_flink_document` values (218, 'Reference', '建表语句', 'Streaming', 'EXECUTE CDCSOURCE print', 'Whole library synchronization print', 'EXECUTE CDCSOURCE demo_print WITH ( ''connector'' = ''mysql-cdc'', From 089efcd250cdb6dcd86574b6defb39b1ea207c65 Mon Sep 17 00:00:00 2001 From: ZackYoung Date: Fri, 3 Nov 2023 20:37:29 +0800 Subject: [PATCH 03/21] [Feature][UDF]feature_add_udf_manage (#2476) * feature_add_udf_manage * feature_add_udf_manage * support udf register manage web * feature_add_udf_manage * feature_add_udf_manage * feature_add_udf_manage * feature_add_udf_manage * feature_add_udf_manage --------- Co-authored-by: zhu-mingye <934230207@qq.com> --- .../org/dinky/controller/UDFController.java | 95 +++++++ .../java/org/dinky/data/dto/CommonDTO.java | 33 +++ .../java/org/dinky/data/model/UDFManage.java | 51 ++++ .../java/org/dinky/data/vo/UDFManageVO.java | 44 ++++ .../org/dinky/mapper/UDFManageMapper.java | 36 +++ .../java/org/dinky/service/UDFService.java | 38 ++- .../dinky/service/impl/UDFServiceImpl.java | 130 +++++++++- .../service/resource/BaseResourceManager.java | 2 +- .../service/resource/ResourcesService.java | 16 ++ .../resource/impl/HdfsResourceManager.java | 4 +- .../resource/impl/OssResourceManager.java | 4 +- .../resource/impl/ResourceServiceImpl.java | 25 ++ .../java/org/dinky/service/task/UdfTask.java | 22 +- .../java/org/dinky/url/RsURLConnection.java | 12 +- .../org/dinky/url/RsURLStreamHandler.java | 3 +- .../main/java/org/dinky/utils/RSUtils.java | 35 +++ dinky-admin/src/main/resources/db/db-h2.sql | 11 + .../main/resources/mapper/UDFManageMapper.xml | 30 +++ .../main/java/org/dinky/utils/URLUtils.java | 12 +- .../org/dinky/function/FunctionFactory.java | 11 +- .../java/org/dinky/function/util/UDFUtil.java | 25 +- .../Transfer/TreeTransfer/index.tsx | 90 +++++++ dinky-web/src/global.less | 4 +- dinky-web/src/locales/en-US/menu.ts | 2 +- dinky-web/src/locales/zh-CN/menu.ts | 2 +- .../UDF/components/UDFRegister/index.tsx | 231 ++++++++++++++++++ .../UDF/components/UDFRegister/service.tsx | 29 +++ .../TemplateModal/TemplateProFrom/index.tsx | 26 +- .../{ => UDFTemplate}/TemplateModal/index.tsx | 28 ++- .../{ => UDFTemplate}/TemplateTable/index.tsx | 28 ++- 
.../UDFTemplateDesc/index.tsx | 46 ++++ .../UDFTemplate/UDFTemplateDrawer/index.tsx | 43 ++++ .../UDFTemplateDesc/index.tsx | 46 ---- .../components/UDFTemplateDrawer/index.tsx | 43 ---- dinky-web/src/pages/RegCenter/UDF/index.tsx | 57 +++-- dinky-web/src/services/endpoints.tsx | 33 +-- dinky-web/src/types/RegCenter/data.d.ts | 61 ++++- pom.xml | 1 + script/sql/dinky-mysql.sql | 12 + script/sql/dinky-pg.sql | 37 +++ .../1.0.0-SNAPSHOT_schema/mysql/dinky_ddl.sql | 12 + 41 files changed, 1258 insertions(+), 212 deletions(-) create mode 100644 dinky-admin/src/main/java/org/dinky/controller/UDFController.java create mode 100644 dinky-admin/src/main/java/org/dinky/data/dto/CommonDTO.java create mode 100644 dinky-admin/src/main/java/org/dinky/data/model/UDFManage.java create mode 100644 dinky-admin/src/main/java/org/dinky/data/vo/UDFManageVO.java create mode 100644 dinky-admin/src/main/java/org/dinky/mapper/UDFManageMapper.java create mode 100644 dinky-admin/src/main/java/org/dinky/utils/RSUtils.java create mode 100644 dinky-admin/src/main/resources/mapper/UDFManageMapper.xml create mode 100644 dinky-web/src/components/Transfer/TreeTransfer/index.tsx create mode 100644 dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/index.tsx create mode 100644 dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/service.tsx rename dinky-web/src/pages/RegCenter/UDF/components/{ => UDFTemplate}/TemplateModal/TemplateProFrom/index.tsx (83%) rename dinky-web/src/pages/RegCenter/UDF/components/{ => UDFTemplate}/TemplateModal/index.tsx (70%) rename dinky-web/src/pages/RegCenter/UDF/components/{ => UDFTemplate}/TemplateTable/index.tsx (89%) create mode 100644 dinky-web/src/pages/RegCenter/UDF/components/UDFTemplate/UDFTemplateDrawer/UDFTemplateDesc/index.tsx create mode 100644 dinky-web/src/pages/RegCenter/UDF/components/UDFTemplate/UDFTemplateDrawer/index.tsx delete mode 100644 dinky-web/src/pages/RegCenter/UDF/components/UDFTemplateDrawer/UDFTemplateDesc/index.tsx delete mode 100644 dinky-web/src/pages/RegCenter/UDF/components/UDFTemplateDrawer/index.tsx diff --git a/dinky-admin/src/main/java/org/dinky/controller/UDFController.java b/dinky-admin/src/main/java/org/dinky/controller/UDFController.java new file mode 100644 index 0000000000..d3940dd765 --- /dev/null +++ b/dinky-admin/src/main/java/org/dinky/controller/UDFController.java @@ -0,0 +1,95 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.dinky.controller; + +import org.dinky.data.dto.CommonDTO; +import org.dinky.data.model.Resources; +import org.dinky.data.model.UDFManage; +import org.dinky.data.result.Result; +import org.dinky.data.vo.UDFManageVO; +import org.dinky.service.UDFService; + +import java.util.List; + +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +import io.swagger.annotations.Api; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +/** + * Flink udf controller + */ +@Slf4j +@Api(tags = "UDF Controller") +@RestController +@RequestMapping("/api/udf") +@RequiredArgsConstructor +public class UDFController { + private final UDFService udfService; + + /** + * update udf name by id + * + * @return Result + */ + @GetMapping("/list") + public Result> list() { + return Result.succeed(udfService.selectAll()); + } + + /** + * update udf + * + * @param udfManage udfManage + * @return Result + */ + @PostMapping("/update") + public Result update(@RequestBody UDFManage udfManage) { + udfService.update(udfManage); + return Result.succeed(); + } + + /** + * get udf resources list + * + * @return Result + */ + @GetMapping("/udfResourcesList") + public Result> udfResourcesList() { + return Result.succeed(udfService.udfResourcesList()); + } + + /** + * add or update by resource id + * + * @param dto dto + * @return Result + */ + @PostMapping("/addOrUpdateByResourceId") + public Result saveOrUpdate(@RequestBody CommonDTO> dto) { + udfService.addOrUpdateByResourceId(dto.getData()); + return Result.succeed(); + } +} diff --git a/dinky-admin/src/main/java/org/dinky/data/dto/CommonDTO.java b/dinky-admin/src/main/java/org/dinky/data/dto/CommonDTO.java new file mode 100644 index 0000000000..ff177672df --- /dev/null +++ b/dinky-admin/src/main/java/org/dinky/data/dto/CommonDTO.java @@ -0,0 +1,33 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.dinky.data.dto; + +import org.apache.poi.ss.formula.functions.T; + +import lombok.Data; + +/** + * When using post, the dto passed as a parameter(使用post时,一个参数传递的dto) + * @param params + */ +@Data +public class CommonDTO { + private T data; +} diff --git a/dinky-admin/src/main/java/org/dinky/data/model/UDFManage.java b/dinky-admin/src/main/java/org/dinky/data/model/UDFManage.java new file mode 100644 index 0000000000..c34d5d92e3 --- /dev/null +++ b/dinky-admin/src/main/java/org/dinky/data/model/UDFManage.java @@ -0,0 +1,51 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.data.model; + +import org.dinky.mybatis.model.SuperEntity; + +import com.baomidou.mybatisplus.annotation.TableName; + +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.NoArgsConstructor; + +@Data +@EqualsAndHashCode(callSuper = false) +@TableName("dinky_udf_manage") +@ApiModel(value = "UDFTemplate", description = "User-Defined Function Template") +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class UDFManage extends SuperEntity { + + @ApiModelProperty(value = "Class Name", dataType = "String", notes = "Class Name") + private String className; + + @ApiModelProperty(value = "Task Id", dataType = "Integer", notes = "Task Id") + private Integer taskId; + + @ApiModelProperty(value = "Resources Id", dataType = "Integer", notes = "Resources Id") + private Integer resourcesId; +} diff --git a/dinky-admin/src/main/java/org/dinky/data/vo/UDFManageVO.java b/dinky-admin/src/main/java/org/dinky/data/vo/UDFManageVO.java new file mode 100644 index 0000000000..4ef22a55a5 --- /dev/null +++ b/dinky-admin/src/main/java/org/dinky/data/vo/UDFManageVO.java @@ -0,0 +1,44 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.dinky.data.vo; + +import java.io.Serializable; +import java.util.Date; + +import lombok.Data; + +@Data +public class UDFManageVO implements Serializable { + private Integer id; + private String name; + private Boolean enabled; + private String className; + private Integer taskId; + private Integer resourcesId; + /** + * develop or resources + */ + private String source; + + private String dialect; + private String fileName; + private Date createTime; + private Date updateTime; +} diff --git a/dinky-admin/src/main/java/org/dinky/mapper/UDFManageMapper.java b/dinky-admin/src/main/java/org/dinky/mapper/UDFManageMapper.java new file mode 100644 index 0000000000..1b7c5001f7 --- /dev/null +++ b/dinky-admin/src/main/java/org/dinky/mapper/UDFManageMapper.java @@ -0,0 +1,36 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.mapper; + +import org.dinky.data.model.UDFManage; +import org.dinky.data.vo.UDFManageVO; +import org.dinky.mybatis.mapper.SuperMapper; + +import org.apache.ibatis.annotations.Mapper; + +import java.util.List; + +/** + * UDFManageMapper + */ +@Mapper +public interface UDFManageMapper extends SuperMapper { + List selectAll(); +} diff --git a/dinky-admin/src/main/java/org/dinky/service/UDFService.java b/dinky-admin/src/main/java/org/dinky/service/UDFService.java index 50fcd44766..f61aad2835 100644 --- a/dinky-admin/src/main/java/org/dinky/service/UDFService.java +++ b/dinky-admin/src/main/java/org/dinky/service/UDFService.java @@ -19,4 +19,40 @@ package org.dinky.service; -public interface UDFService {} +import org.dinky.data.model.Resources; +import org.dinky.data.model.UDFManage; +import org.dinky.data.vo.UDFManageVO; + +import java.util.List; + +import org.springframework.transaction.annotation.Transactional; + +import com.baomidou.mybatisplus.extension.service.IService; + +public interface UDFService extends IService { + /** + * update udf name by id + * @param entity udf + * @return boolean + */ + boolean update(UDFManage entity); + + /** + * get all udf + * @return List + */ + List selectAll(); + + /** + * get udf by id + * @return UDFManage + */ + List udfResourcesList(); + + /** + * add or update udf by resourceIds + * @param resourceIds resourceIds + */ + @Transactional(rollbackFor = Exception.class) + void addOrUpdateByResourceId(List resourceIds); +} diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java index 71308ad93a..cc60e70d39 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java @@ -19,10 +19,136 @@ package org.dinky.service.impl; +import org.dinky.config.Dialect; 
+import org.dinky.data.model.Resources; +import org.dinky.data.model.UDFManage; +import org.dinky.data.vo.UDFManageVO; +import org.dinky.mapper.UDFManageMapper; import org.dinky.service.UDFService; +import org.dinky.service.resource.ResourcesService; +import org.dinky.utils.UDFUtils; + +import java.io.File; +import java.util.Collection; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; +import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; + +import cn.hutool.core.collection.CollUtil; +import cn.hutool.core.io.FileUtil; +import cn.hutool.core.lang.Assert; +import cn.hutool.core.util.CharUtil; +import cn.hutool.core.util.StrUtil; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; -/** @since 0.6.8 */ +/** + * @since 0.6.8 + */ @Service -public class UDFServiceImpl implements UDFService {} +@RequiredArgsConstructor +@Slf4j +public class UDFServiceImpl extends ServiceImpl implements UDFService { + private final ResourcesService resourcesService; + + @Override + public boolean update(UDFManage entity) { + Assert.notNull(entity, "Entity must be not null"); + Integer id = entity.getId(); + UDFManage byId = getById(id); + Assert.notNull(byId, "UDFManage not found"); + byId.setName(entity.getName()); + return super.updateById(byId); + } + + @Override + public List selectAll() { + List udfManageList = baseMapper.selectAll(); + return udfManageList.stream() + .filter(x -> "resources".equals(x.getSource())) + .peek(x -> { + String fileName = x.getFileName(); + if ("jar".equals(FileUtil.getSuffix(fileName))) { + x.setDialect(Dialect.JAVA.getValue()); + } else { + x.setDialect(Dialect.PYTHON.getValue()); + } + }) + .collect(Collectors.toList()); + } + + @Override + public List udfResourcesList() { + return resourcesService.getResourcesTreeByFilter(x -> { + String suffix = FileUtil.getSuffix(x.getFileName()); + return x.getIsDirectory() || "jar".equals(suffix) || "zip".equals(suffix) || "py".equals(suffix); + }); + } + + @Transactional(rollbackFor = Exception.class) + @Override + public void addOrUpdateByResourceId(List resourceIds) { + LambdaQueryWrapper queryWrapper = + new LambdaQueryWrapper().and(x -> x.isNotNull(UDFManage::getResourcesId)); + List udfManageList = baseMapper.selectList(queryWrapper); + List udfManageIdList = + udfManageList.stream().map(UDFManage::getResourcesId).distinct().collect(Collectors.toList()); + // 1. Delete all UDFs that are not in the resourceIds list. + List needDeleteList = udfManageList.stream() + .filter(x -> !resourceIds.contains(x.getResourcesId())) + .collect(Collectors.toList()); + removeByIds(needDeleteList); + // 2. Add all UDFs that are not in the UDFManage table. 
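+        //    Jar resources are scanned with UDFUtils.getUdfClassByJar and py/zip resources with
+        //    UDFUtils.getPythonUdfList; each discovered class or function becomes a UDFManage row
+        //    whose default name is the underline-cased simple class name.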
+ Collection needAddList = + resourceIds.stream().filter(x -> !udfManageIdList.contains(x)).collect(Collectors.toList()); + if (CollUtil.isNotEmpty(needAddList)) { + List resources = resourcesService.listByIds(needAddList); + List manageList = resources.stream() + .flatMap(x -> { + String suffix = FileUtil.getSuffix(x.getFileName()); + if ("jar".equals(suffix)) { + File file = resourcesService.getFile(x.getId()); + List> classes = UDFUtils.getUdfClassByJar(file); + return classes.stream().map(clazz -> { + UDFManage udfManage = UDFManage.builder() + .className(clazz.getName()) + .resourcesId(x.getId()) + .build(); + udfManage.setName(StrUtil.toUnderlineCase(getSimpleClassName(clazz.getName()))); + return udfManage; + }); + } else if ("py".equals(suffix) || "zip".equals(suffix)) { + File file = resourcesService.getFile(x.getId()); + List pythonUdfList = UDFUtils.getPythonUdfList(file.getAbsolutePath()); + return pythonUdfList.stream().map(className -> { + UDFManage udfManage = UDFManage.builder() + .className(className) + .resourcesId(x.getId()) + .build(); + udfManage.setName(StrUtil.toUnderlineCase(getSimpleClassName(className))); + return udfManage; + }); + } else { + log.error("Unsupported file type: {}", suffix); + } + return Stream.of(); + }) + .collect(Collectors.toList()); + saveBatch(manageList); + } + } + + private static String getSimpleClassName(String className) { + final List packages = StrUtil.split(className, CharUtil.DOT); + if (null == packages || packages.size() < 2) { + return className; + } + return CollUtil.getLast(packages); + } +} diff --git a/dinky-admin/src/main/java/org/dinky/service/resource/BaseResourceManager.java b/dinky-admin/src/main/java/org/dinky/service/resource/BaseResourceManager.java index fdc4950d08..6f58ee7757 100644 --- a/dinky-admin/src/main/java/org/dinky/service/resource/BaseResourceManager.java +++ b/dinky-admin/src/main/java/org/dinky/service/resource/BaseResourceManager.java @@ -41,7 +41,7 @@ public interface BaseResourceManager { String getFileContent(String path); - InputStream getFile(String path); + InputStream readFile(String path); static BaseResourceManager getInstance() { switch (SystemConfiguration.getInstances().getResourcesModel().getValue()) { diff --git a/dinky-admin/src/main/java/org/dinky/service/resource/ResourcesService.java b/dinky-admin/src/main/java/org/dinky/service/resource/ResourcesService.java index fea86d067d..5d0768c8b8 100644 --- a/dinky-admin/src/main/java/org/dinky/service/resource/ResourcesService.java +++ b/dinky-admin/src/main/java/org/dinky/service/resource/ResourcesService.java @@ -23,7 +23,9 @@ import org.dinky.data.model.Resources; import org.dinky.data.result.Result; +import java.io.File; import java.util.List; +import java.util.function.Function; import org.springframework.web.multipart.MultipartFile; @@ -77,6 +79,13 @@ public interface ResourcesService extends IService { */ String getContentByResourceId(Integer id); + /** + * Download files from explorer(从资源管理器下载文件) + * @param id resource id + * @return {@link File} + */ + File getFile(Integer id); + /** * Upload a file to the specified folder. 
* @@ -113,4 +122,11 @@ public interface ResourcesService extends IService { * @return {@link Result}< {@link List}< {@link Resources}>>} */ List getResourcesTree(); + + /** + * query Resources tree data by filter + * @param filterFunction filter function + * @return {@link Result}< {@link List}< {@link Resources}>>} + */ + List getResourcesTreeByFilter(Function filterFunction); } diff --git a/dinky-admin/src/main/java/org/dinky/service/resource/impl/HdfsResourceManager.java b/dinky-admin/src/main/java/org/dinky/service/resource/impl/HdfsResourceManager.java index 34d8df4665..5fb0504008 100644 --- a/dinky-admin/src/main/java/org/dinky/service/resource/impl/HdfsResourceManager.java +++ b/dinky-admin/src/main/java/org/dinky/service/resource/impl/HdfsResourceManager.java @@ -68,11 +68,11 @@ public void putFile(String path, MultipartFile file) { @Override public String getFileContent(String path) { - return IoUtil.readUtf8(getFile(path)); + return IoUtil.readUtf8(readFile(path)); } @Override - public InputStream getFile(String path) { + public InputStream readFile(String path) { try { return getHdfs().open(new Path(getFilePath(path))); } catch (IOException e) { diff --git a/dinky-admin/src/main/java/org/dinky/service/resource/impl/OssResourceManager.java b/dinky-admin/src/main/java/org/dinky/service/resource/impl/OssResourceManager.java index 2e2fe4809e..b15976396e 100644 --- a/dinky-admin/src/main/java/org/dinky/service/resource/impl/OssResourceManager.java +++ b/dinky-admin/src/main/java/org/dinky/service/resource/impl/OssResourceManager.java @@ -65,11 +65,11 @@ public void putFile(String path, MultipartFile file) { @Override public String getFileContent(String path) { - return IoUtil.readUtf8(getFile(path)); + return IoUtil.readUtf8(readFile(path)); } @Override - public InputStream getFile(String path) { + public InputStream readFile(String path) { return getOssTemplate() .getObject(getOssTemplate().getBucketName(), getFilePath(path)) .getObjectContent(); diff --git a/dinky-admin/src/main/java/org/dinky/service/resource/impl/ResourceServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/resource/impl/ResourceServiceImpl.java index d1e71334ff..2043142561 100644 --- a/dinky-admin/src/main/java/org/dinky/service/resource/impl/ResourceServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/resource/impl/ResourceServiceImpl.java @@ -27,10 +27,13 @@ import org.dinky.mapper.ResourcesMapper; import org.dinky.service.resource.BaseResourceManager; import org.dinky.service.resource.ResourcesService; +import org.dinky.utils.URLUtils; +import java.io.File; import java.util.ArrayList; import java.util.Comparator; import java.util.List; +import java.util.function.Function; import java.util.stream.Collectors; import org.springframework.stereotype.Service; @@ -161,6 +164,14 @@ public String getContentByResourceId(Integer id) { return getBaseResourceManager().getFileContent(resources.getFullName()); } + @Override + public File getFile(Integer id) { + Resources resources = getById(id); + Assert.notNull(resources, () -> new BusException(Status.RESOURCE_DIR_OR_FILE_NOT_EXIST)); + Assert.isFalse(resources.getSize() > ALLOW_MAX_CAT_CONTENT_SIZE, () -> new BusException("file is too large!")); + return URLUtils.toFile("rs:" + resources.getFullName()); + } + @Transactional(rollbackFor = Exception.class) @Override public void uploadFile(Integer pid, String desc, MultipartFile file) { @@ -289,6 +300,20 @@ public List getResourcesTree() { return buildResourcesTree(this.list()); } + /** + * query 
Resources tree data by filter + * + * @param filterFunction filter function + * @return {@link Result}< {@link List}< {@link Resources}>>} + */ + public List getResourcesTreeByFilter(Function filterFunction) { + List list = this.list(); + return buildResourcesTree( + filterFunction == null + ? list + : list.stream().filter(filterFunction::apply).collect(Collectors.toList())); + } + /** * build resources tree * diff --git a/dinky-admin/src/main/java/org/dinky/service/task/UdfTask.java b/dinky-admin/src/main/java/org/dinky/service/task/UdfTask.java index 06c920fcff..c19bfe8b1c 100644 --- a/dinky-admin/src/main/java/org/dinky/service/task/UdfTask.java +++ b/dinky-admin/src/main/java/org/dinky/service/task/UdfTask.java @@ -23,10 +23,16 @@ import org.dinky.data.annotation.SupportDialect; import org.dinky.data.dto.TaskDTO; import org.dinky.data.model.Task; +import org.dinky.function.FunctionFactory; +import org.dinky.function.data.model.UDF; +import org.dinky.job.Job; import org.dinky.job.JobResult; import org.dinky.utils.UDFUtils; +import java.util.Collections; + import cn.hutool.core.bean.BeanUtil; +import cn.hutool.core.exceptions.ExceptionUtil; @SupportDialect({Dialect.JAVA, Dialect.PYTHON, Dialect.SCALA}) public class UdfTask extends BaseTask { @@ -36,12 +42,22 @@ public UdfTask(TaskDTO task) { @Override public JobResult execute() throws Exception { - UDFUtils.taskToUDF(BeanUtil.toBean(task, Task.class)); - return null; + JobResult jobResult = new JobResult(); + jobResult.setSuccess(true); + jobResult.setStatus(Job.JobStatus.SUCCESS); + try { + UDF udf = UDFUtils.taskToUDF(BeanUtil.toBean(task, Task.class)); + FunctionFactory.initUDF(Collections.singletonList(udf), task.getId()); + } catch (Exception e) { + jobResult.setSuccess(false); + jobResult.setError(ExceptionUtil.getRootCauseMessage(e)); + jobResult.setStatus(Job.JobStatus.FAILED); + } + return jobResult; } @Override public boolean stop() { - return false; + return true; } } diff --git a/dinky-admin/src/main/java/org/dinky/url/RsURLConnection.java b/dinky-admin/src/main/java/org/dinky/url/RsURLConnection.java index d95f85de92..77af19f38c 100644 --- a/dinky-admin/src/main/java/org/dinky/url/RsURLConnection.java +++ b/dinky-admin/src/main/java/org/dinky/url/RsURLConnection.java @@ -20,17 +20,12 @@ package org.dinky.url; import org.dinky.data.exception.BusException; -import org.dinky.function.constant.PathConstant; import org.dinky.service.resource.BaseResourceManager; -import java.io.File; import java.io.InputStream; import java.net.URL; import java.net.URLConnection; -import cn.hutool.core.io.FileUtil; -import cn.hutool.core.util.StrUtil; - public class RsURLConnection extends URLConnection { private InputStream inputStream; @@ -40,6 +35,7 @@ public void connect() { if (instance == null) { throw BusException.valueOf("ResourceManager is disabled"); } + inputStream = instance.readFile(getURL().getPath()); } @Override @@ -51,10 +47,4 @@ public InputStream getInputStream() { public RsURLConnection(URL url) { super(url); } - - public File toFile() { - connect(); - String path = StrUtil.join(File.separator, PathConstant.TMP_PATH, "rs", getURL().getPath()); - return FileUtil.writeFromStream(inputStream, path); - } } diff --git a/dinky-admin/src/main/java/org/dinky/url/RsURLStreamHandler.java b/dinky-admin/src/main/java/org/dinky/url/RsURLStreamHandler.java index 0192153594..71bb1a7f18 100644 --- a/dinky-admin/src/main/java/org/dinky/url/RsURLStreamHandler.java +++ b/dinky-admin/src/main/java/org/dinky/url/RsURLStreamHandler.java @@ -19,7 
+19,6 @@ package org.dinky.url; -import java.io.IOException; import java.net.URL; import java.net.URLConnection; import java.net.URLStreamHandler; @@ -27,7 +26,7 @@ public class RsURLStreamHandler extends URLStreamHandler { @Override - protected URLConnection openConnection(URL u) throws IOException { + protected URLConnection openConnection(URL u) { return new RsURLConnection(u); } } diff --git a/dinky-admin/src/main/java/org/dinky/utils/RSUtils.java b/dinky-admin/src/main/java/org/dinky/utils/RSUtils.java new file mode 100644 index 0000000000..4bd9e1ae5d --- /dev/null +++ b/dinky-admin/src/main/java/org/dinky/utils/RSUtils.java @@ -0,0 +1,35 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.utils; + +import org.dinky.function.constant.PathConstant; + +import java.io.File; + +import cn.hutool.core.util.StrUtil; + +/** + * Resources Utils + */ +public class RSUtils { + public static String getFilePath(String path) { + return StrUtil.join(File.separator, PathConstant.TMP_PATH, "rs", path); + } +} diff --git a/dinky-admin/src/main/resources/db/db-h2.sql b/dinky-admin/src/main/resources/db/db-h2.sql index f920a69330..f88d16598a 100644 --- a/dinky-admin/src/main/resources/db/db-h2.sql +++ b/dinky-admin/src/main/resources/db/db-h2.sql @@ -2895,3 +2895,14 @@ INSERT INTO dinky_alert_template VALUES (1, 'Default', ' - **${(exceptions.rootException)?substring(0,20)}** [Go toTask Web](http://${taskUrl}) ', 1, null, null); + +CREATE TABLE `dinky_udf_manage` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `name` varchar(50) DEFAULT NULL COMMENT 'udf name', + `class_name` varchar(50) DEFAULT NULL COMMENT 'Complete class name', + `task_id` int(11) DEFAULT NULL COMMENT 'task id', + `resources_id` int(11) DEFAULT NULL COMMENT 'resources id', + `enabled` tinyint(1) DEFAULT 1 COMMENT 'is enable', + `create_time` datetime DEFAULT NULL COMMENT 'create time', + `update_time` datetime DEFAULT NULL COMMENT 'update time' +) ENGINE = InnoDB ROW_FORMAT = DYNAMIC; diff --git a/dinky-admin/src/main/resources/mapper/UDFManageMapper.xml b/dinky-admin/src/main/resources/mapper/UDFManageMapper.xml new file mode 100644 index 0000000000..9ec2209aaf --- /dev/null +++ b/dinky-admin/src/main/resources/mapper/UDFManageMapper.xml @@ -0,0 +1,30 @@ + + + + + + + diff --git a/dinky-common/src/main/java/org/dinky/utils/URLUtils.java b/dinky-common/src/main/java/org/dinky/utils/URLUtils.java index b1faa2e30f..e2a490e073 100644 --- a/dinky-common/src/main/java/org/dinky/utils/URLUtils.java +++ b/dinky-common/src/main/java/org/dinky/utils/URLUtils.java @@ -30,11 +30,15 @@ import cn.hutool.core.io.FileUtil; import cn.hutool.core.util.StrUtil; -/** @since 0.7.0 */ +/** + * @since 0.7.0 + */ public class URLUtils { private static final String TMP_PATH = 
StrUtil.join(File.separator, System.getProperty("user.dir"), "tmp"); + /** * url download file to local + * * @param urlPath urlPath * @return file */ @@ -44,9 +48,9 @@ public static File toFile(String urlPath) { URLConnection urlConnection = url.openConnection(); if ("http".equals(url.getProtocol()) || "https".equals(url.getProtocol()) - || "hdfs".equals(url.getProtocol()) - || "rs".equals(url.getProtocol())) { - String path = StrUtil.join(File.separator, TMP_PATH, "downloadFile", url.getPath()); + || "hdfs".equals(url.getProtocol())) { + } else if ("rs".equals(url.getProtocol())) { + String path = StrUtil.join(File.separator, TMP_PATH, "rs", url.getPath()); return FileUtil.writeFromStream(urlConnection.getInputStream(), path); } else if ("file".equals(url.getProtocol())) { return new File(url.getPath()); diff --git a/dinky-function/src/main/java/org/dinky/function/FunctionFactory.java b/dinky-function/src/main/java/org/dinky/function/FunctionFactory.java index ec71ad2874..1fa1748122 100644 --- a/dinky-function/src/main/java/org/dinky/function/FunctionFactory.java +++ b/dinky-function/src/main/java/org/dinky/function/FunctionFactory.java @@ -30,9 +30,18 @@ /** @since 0.6.8 */ public class FunctionFactory { + /** + * UDF compilation & packaging initialization(udf编译 & 打包 初始化) + * @param udfClassList udf列表 + * @param missionId 当前任务id + * @return 打包过后的路径 + */ + public static UDFPath initUDF(List udfClassList, Integer missionId) { + return initUDF(udfClassList, missionId, new Configuration()); + } /** - * udf编译 & 打包 初始化 + * UDF compilation & packaging initialization(udf编译 & 打包 初始化) * * @param udfClassList udf列表 * @param missionId 当前任务id diff --git a/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java b/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java index abf3dab15f..ad084ebc11 100644 --- a/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java +++ b/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java @@ -24,6 +24,7 @@ import org.dinky.context.DinkyClassLoaderContextHolder; import org.dinky.context.FlinkUdfPathContextHolder; import org.dinky.data.exception.DinkyException; +import org.dinky.data.model.SystemConfiguration; import org.dinky.function.FunctionFactory; import org.dinky.function.compiler.CustomStringJavaCompiler; import org.dinky.function.compiler.CustomStringScalaCompiler; @@ -94,7 +95,9 @@ public class UDFUtil { public static final String YARN = "YARN"; public static final String APPLICATION = "APPLICATION"; - /** 网关类型 map 快速获取 session 与 application 等类型,为了减少判断 */ + /** + * 网关类型 map 快速获取 session 与 application 等类型,为了减少判断 + */ public static final Map> GATEWAY_TYPE_MAP = MapUtil.builder( SESSION, Arrays.asList(GatewayType.YARN_SESSION, GatewayType.KUBERNETES_SESSION, GatewayType.STANDALONE)) @@ -103,7 +106,9 @@ public class UDFUtil { .build(); protected static final Logger log = LoggerFactory.getLogger(UDFUtil.class); - /** 存放 udf md5与版本对应的k,v值 */ + /** + * 存放 udf md5与版本对应的k,v值 + */ protected static final Map UDF_MD5_MAP = new HashMap<>(); public static final String PYTHON_UDF_ATTR = "(\\S)\\s+=\\s+ud(?:f|tf|af|taf)"; @@ -115,8 +120,8 @@ public class UDFUtil { /** * 模板解析 * - * @param dialect 方言 - * @param template 模板 + * @param dialect 方言 + * @param template 模板 * @param className 类名 * @return {@link String} */ @@ -268,7 +273,9 @@ public static String getUdfFileAndBuildJar(List codeList) { } } - /** 扫描udf包文件,写入md5到 UDF_MD5_MAP */ + /** + * 扫描udf包文件,写入md5到 UDF_MD5_MAP + */ @Deprecated private static void scanUDFMD5() { List 
fileList = FileUtil.listFileNames(PathConstant.UDF_PATH); @@ -367,6 +374,10 @@ public static List> getUdfClassByJar(File jarPath) { return classList; } + public static List getPythonUdfList(String udfFile) { + return getPythonUdfList(SystemConfiguration.getInstances().getPythonHome(), udfFile); + } + public static List getPythonUdfList(String pythonPath, String udfFile) { File checkFile = new File(PathConstant.TMP_PATH, "getPyFuncList.py"); if (!checkFile.exists()) { @@ -386,13 +397,11 @@ public static List getPythonUdfList(String pythonPath, String udfFile) { configuration.set(PythonOptions.PYTHON_FILES, udfFile + ".zip"); configuration.set(PythonOptions.PYTHON_CLIENT_EXECUTABLE, pythonPath); configuration.set(PythonOptions.PYTHON_EXECUTABLE, pythonPath); - - System.out.println(udfName); try { PythonFunctionFactory.getPythonFunction(udfName, configuration, null); successUdfList.add(udfName); } catch (Exception e) { - e.printStackTrace(); + log.error("", e); } } return successUdfList; diff --git a/dinky-web/src/components/Transfer/TreeTransfer/index.tsx b/dinky-web/src/components/Transfer/TreeTransfer/index.tsx new file mode 100644 index 0000000000..4196263ccd --- /dev/null +++ b/dinky-web/src/components/Transfer/TreeTransfer/index.tsx @@ -0,0 +1,90 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import React, {Key} from 'react'; +import {theme, Transfer, Tree} from 'antd'; +import type {TransferDirection, TransferItem} from 'antd/es/transfer'; +import type {DataNode} from 'antd/es/tree'; + +interface TreeTransferProps { + dataSource: DataNode[]; + targetKeys: Key[]; + onChange?: (targetKeys: Key[], direction: TransferDirection, moveKeys: string[]) => void; + height?: number; +} + +const isChecked = (selectedKeys: React.Key[], eventKey: React.Key) => + selectedKeys.includes(eventKey); +const generateTree = (treeNodes: DataNode[] = [], checkedKeys: Key[] = []): DataNode[] => + treeNodes.map(({children, ...props}) => ({ + ...props, + disabled: checkedKeys.includes(props.key as string), + children: generateTree(children, checkedKeys), + })); +export const TreeTransfer: React.FC = ({dataSource, targetKeys,height, ...restProps}) => { + const {token} = theme.useToken(); + + const transferDataSource: TransferItem[] = []; + + function flatten(list: DataNode[] = []) { + list.forEach((item) => { + transferDataSource.push(item as TransferItem); + flatten(item.children); + }); + } + + flatten(dataSource); + + return ( + item.path} + showSelectAll={false} + > + {({direction, onItemSelect, selectedKeys}) => { + if (direction === 'left') { + const checkedKeys = [...selectedKeys, ...targetKeys]; + return ( +
                      + { + onItemSelect(key as string, !isChecked(checkedKeys, key)); + }} + onSelect={(_, {node: {key}}) => { + onItemSelect(key as string, !isChecked(checkedKeys, key)); + }} + /> +
                      + ); + } + }} +
                      + ); +}; diff --git a/dinky-web/src/global.less b/dinky-web/src/global.less index 0b1ba2c539..687cb81fab 100644 --- a/dinky-web/src/global.less +++ b/dinky-web/src/global.less @@ -206,8 +206,8 @@ ol { } .ant-table-wrapper { - overflow-x: auto; - overflow-y: auto; + overflow-x: hidden; + overflow-y: hidden; overflow-block: auto; } diff --git a/dinky-web/src/locales/en-US/menu.ts b/dinky-web/src/locales/en-US/menu.ts index 1248320648..18243136a0 100644 --- a/dinky-web/src/locales/en-US/menu.ts +++ b/dinky-web/src/locales/en-US/menu.ts @@ -59,7 +59,7 @@ export default { 'menu.registration.document': 'Document', 'menu.registration.fragment': 'Global Variable', 'menu.registration.gitproject': 'Git Project(Beta)', - 'menu.registration.udf': 'UDF Template', + 'menu.registration.udf': 'UDF', 'menu.registration.resource': 'Resource', 'menu.auth': 'Auth Center', 'menu.auth.user': 'User', diff --git a/dinky-web/src/locales/zh-CN/menu.ts b/dinky-web/src/locales/zh-CN/menu.ts index 3a5bb1e132..0d994ac8ae 100644 --- a/dinky-web/src/locales/zh-CN/menu.ts +++ b/dinky-web/src/locales/zh-CN/menu.ts @@ -59,7 +59,7 @@ export default { 'menu.registration.document': '文档', 'menu.registration.fragment': '全局变量', 'menu.registration.gitproject': 'Git 项目(Beta)', - 'menu.registration.udf': 'UDF 模板', + 'menu.registration.udf': 'UDF', 'menu.registration.resource': '资源', 'menu.auth': '认证中心', 'menu.auth.user': '用户', diff --git a/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/index.tsx b/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/index.tsx new file mode 100644 index 0000000000..7f675d944f --- /dev/null +++ b/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/index.tsx @@ -0,0 +1,231 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
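Note on the TreeTransfer component added above: the JSX element tags were lost when this patch was flattened into single lines, so the render body reads as bare props. For orientation only, a minimal usage sketch of the exported component follows; the sample tree and the wrapper component are invented for illustration and are not part of the patch (in the patch the data comes from buildResourceTreeData over the resource tree, and targetKeys are the resources that already carry registered UDFs).

```tsx
// Hypothetical usage of the TreeTransfer exported above; the import path is taken from the
// patch, everything else (sample nodes, wrapper component) is illustrative only.
import React, { Key, useState } from 'react';
import type { DataNode } from 'antd/es/tree';
import { TreeTransfer } from '@/components/Transfer/TreeTransfer';

const sampleTree: DataNode[] = [
  {
    key: '1',
    title: 'udf',
    children: [
      { key: '2', title: 'my_udf.jar', isLeaf: true },
      { key: '3', title: 'my_udf.py', isLeaf: true }
    ]
  }
];

const ResourcePicker: React.FC = () => {
  // keys already moved to the right-hand list; TreeTransfer disables them in the left tree
  const [targetKeys, setTargetKeys] = useState<Key[]>([]);
  return (
    <TreeTransfer
      dataSource={sampleTree}
      targetKeys={targetKeys}
      onChange={(keys) => setTargetKeys(keys)}
    />
  );
};

export default ResourcePicker;
```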
+ * + */ + +import {ProColumns} from "@ant-design/pro-components"; +import {UDFRegisterInfo, UDFRegisterInfoParent} from "@/types/RegCenter/data"; +import {EditBtn} from "@/components/CallBackButton/EditBtn"; +import {PopconfirmDeleteBtn} from "@/components/CallBackButton/PopconfirmDeleteBtn"; +import {Key, useEffect, useRef, useState} from "react"; +import ProTable, {ActionType} from "@ant-design/pro-table"; +import {useRequest} from "@@/plugin-request"; +import {API_CONSTANTS} from "@/services/endpoints"; +import {Button, Modal} from "antd"; +import {PlusOutlined} from "@ant-design/icons"; +import {TreeTransfer} from "@/components/Transfer/TreeTransfer"; +import {buildResourceTreeData} from "@/pages/RegCenter/Resource/components/FileTree/function"; +import {add, update} from "./service"; +import {l} from "@/utils/intl"; + + +const UDFRegister = () => { + const req = useRequest<{ + data: UDFRegisterInfo[] + }>({ + url: API_CONSTANTS.UDF_LIST + }); + const req_resources = useRequest({ + url: API_CONSTANTS.UDF_RESOURCES_LIST + }); + const actionRef = useRef(); + const [udfRegisterState, setUDFRegisterState] = useState<{ + editableKeys: Key[]; + isEdit: boolean; + isAdd: boolean; + dataSource: UDFRegisterInfo[]; + }>({ + editableKeys: [], + dataSource: [], + isEdit: false, + isAdd: false, + }); + const [showEdit, setShowEdit] = useState(false); + const [targetKeys, setTargetKeys] = useState([]); + useEffect(() => { + setTargetKeys([...new Set(req.data?.map(x => x.resourcesId))]) + }, [req.data]) + const editableKeysChange = (editableKeys: Key[]) => { + setUDFRegisterState(prevState => ({...prevState, editableKeys})); + } + + const groupData: { + [p: string]: UDFRegisterInfo[] | undefined + } = Object.fromEntries( + Array.from(new Set(req.data?.map(({fileName}) => fileName))).map((type) => [ + type, + req.data?.filter((item) => item.fileName === type), + ]), + ) + const parentData: UDFRegisterInfoParent[] = Object.keys(groupData)?.map((key) => { + const d = groupData[key] ?? []; + return { + resourcesId: d[0].resourcesId, + dialect: d[0].dialect, + source: d[0].source, + fileName: key, + num: d.length, + } + }) + + + const columnsParent: ProColumns[] = [ + { + title: '文件名称', + width: 120, + dataIndex: 'fileName', + }, { + title: 'udf解析数量', + width: 120, + dataIndex: 'num', + }, { + title: '来源', + width: 120, + dataIndex: 'source', + valueEnum: { + resources: {text: "resources"}, + develop: {text: 'develop'}, + } + }, { + title: '语言', + width: 120, + dataIndex: 'dialect', + valueEnum: { + java: {text: "Java"}, + python: {text: 'Python'}, + } + } + ] + const expandedRowRender = (expandedRow: UDFRegisterInfoParent) => { + + const columns: ProColumns[] = [ + { + title: '名称', + dataIndex: 'name', + width: '10%', + formItemProps: { + rules: [ + { + required: true, + message: '请输入名称', + }, + ], + }, + }, + { + title: '类名', + dataIndex: 'className', + readonly: true, + width: '15%', + }, + { + title: '更新时间', + dataIndex: 'updateTime', + readonly: true, + valueType: 'dateTime', + width: '15%', + }, + { + title: '操作', + valueType: 'option', + width: '10%', + render: (_text, record, _, action) => { + return [ + { + action?.startEditable?.(record.id); + setUDFRegisterState(prevState => ({...prevState, isEdit: true, isAdd: false})); + }}/> + , record.source == "develop" ? 
+ { + }} description={"确定删除吗???"}/> + : <> + ] + }, + } + ] + + return ( + { + await update(row.id, row.name); + await req.refresh(); + actionRef.current?.reload(); + }, + actionRender: (_, _2, defaultDom) => [ + defaultDom.save, + defaultDom.cancel + ] + }} + /> + ); + }; + + + return ( + <> + + dataSource={parentData} + columns={columnsParent} + rowKey="resourcesId" + pagination={{ + showQuickJumper: true, + }} + expandable={{expandedRowRender}} + search={false} + dateFormatter="string" + options={false} + toolBarRender={() => [ + + ]} + /> + setShowEdit(false)} + onOk={() => { + add(targetKeys).then(() => { + req.refresh().then(() => { + setShowEdit(false) + }) + }) + }}> + + + + ) +} + +export default UDFRegister diff --git a/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/service.tsx b/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/service.tsx new file mode 100644 index 0000000000..70554658ff --- /dev/null +++ b/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/service.tsx @@ -0,0 +1,29 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import {postAll} from "@/services/api"; +import {API_CONSTANTS} from "@/services/endpoints"; +import {Key} from "react"; + +export const add = (ids:Key[]) => { + return postAll(API_CONSTANTS.UDF_ADD,{data:ids}); +} +export const update = (id:Key,name:string) => { + return postAll(API_CONSTANTS.UDF_UPDATE,{id,name}); +} \ No newline at end of file diff --git a/dinky-web/src/pages/RegCenter/UDF/components/TemplateModal/TemplateProFrom/index.tsx b/dinky-web/src/pages/RegCenter/UDF/components/UDFTemplate/TemplateModal/TemplateProFrom/index.tsx similarity index 83% rename from dinky-web/src/pages/RegCenter/UDF/components/TemplateModal/TemplateProFrom/index.tsx rename to dinky-web/src/pages/RegCenter/UDF/components/UDFTemplate/TemplateModal/TemplateProFrom/index.tsx index 61d9b70598..d44abaa72e 100644 --- a/dinky-web/src/pages/RegCenter/UDF/components/TemplateModal/TemplateProFrom/index.tsx +++ b/dinky-web/src/pages/RegCenter/UDF/components/UDFTemplate/TemplateModal/TemplateProFrom/index.tsx @@ -1,18 +1,20 @@ /* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
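The UDFRegister table above derives one parent row per file from the flat /api/udf/list payload, so each jar or Python file shows up once with a count of the UDFs parsed out of it. A standalone sketch of that grouping step, with the record shape trimmed to the fields actually used (the full UDFRegisterInfo / UDFRegisterInfoParent interfaces are declared later in data.d.ts):

```tsx
// Illustrative re-implementation of the parent-row grouping used by UDFRegister above.
// The trimmed interfaces mirror UDFRegisterInfo / UDFRegisterInfoParent from this patch.
interface UDFInfoSlim {
  resourcesId: number;
  dialect: string;
  source: string;
  fileName: string;
}

interface UDFParentRow extends UDFInfoSlim {
  num: number; // how many UDFs were parsed from this file
}

export const groupByFileName = (rows: UDFInfoSlim[]): UDFParentRow[] => {
  const byFile = new Map<string, UDFInfoSlim[]>();
  rows.forEach((row) => {
    const group = byFile.get(row.fileName) ?? [];
    group.push(row);
    byFile.set(row.fileName, group);
  });
  // one parent row per file: shared resource/dialect/source info plus a child count
  return Array.from(byFile.entries()).map(([fileName, group]) => ({
    ...group[0],
    fileName,
    num: group.length
  }));
};
```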
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. */ import CodeEdit from '@/components/CustomEditor/CodeEdit'; diff --git a/dinky-web/src/pages/RegCenter/UDF/components/TemplateModal/index.tsx b/dinky-web/src/pages/RegCenter/UDF/components/UDFTemplate/TemplateModal/index.tsx similarity index 70% rename from dinky-web/src/pages/RegCenter/UDF/components/TemplateModal/index.tsx rename to dinky-web/src/pages/RegCenter/UDF/components/UDFTemplate/TemplateModal/index.tsx index 57f945dd46..7630b79e29 100644 --- a/dinky-web/src/pages/RegCenter/UDF/components/TemplateModal/index.tsx +++ b/dinky-web/src/pages/RegCenter/UDF/components/UDFTemplate/TemplateModal/index.tsx @@ -1,22 +1,24 @@ /* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
*/ import { FormContextValue } from '@/components/Context/FormContext'; -import TemplateProFrom from '@/pages/RegCenter/UDF/components/TemplateModal/TemplateProFrom'; +import TemplateProFrom from '@/pages/RegCenter/UDF/components/UDFTemplate/TemplateModal/TemplateProFrom'; import { GitProject, UDFTemplate } from '@/types/RegCenter/data'; import { l } from '@/utils/intl'; import { ModalForm } from '@ant-design/pro-components'; diff --git a/dinky-web/src/pages/RegCenter/UDF/components/TemplateTable/index.tsx b/dinky-web/src/pages/RegCenter/UDF/components/UDFTemplate/TemplateTable/index.tsx similarity index 89% rename from dinky-web/src/pages/RegCenter/UDF/components/TemplateTable/index.tsx rename to dinky-web/src/pages/RegCenter/UDF/components/UDFTemplate/TemplateTable/index.tsx index b7f839bbf7..4c76689939 100644 --- a/dinky-web/src/pages/RegCenter/UDF/components/TemplateTable/index.tsx +++ b/dinky-web/src/pages/RegCenter/UDF/components/UDFTemplate/TemplateTable/index.tsx @@ -1,18 +1,20 @@ /* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
*/ import { CreateBtn } from '@/components/CallBackButton/CreateBtn'; @@ -21,7 +23,6 @@ import { EnableSwitchBtn } from '@/components/CallBackButton/EnableSwitchBtn'; import { PopconfirmDeleteBtn } from '@/components/CallBackButton/PopconfirmDeleteBtn'; import CodeShow from '@/components/CustomEditor/CodeShow'; import { Authorized, HasAuthority } from '@/hooks/useAccess'; -import TemplateModal from '@/pages/RegCenter/UDF/components/TemplateModal'; import { CODE_TYPE_ENUM, CODE_TYPE_FILTER, @@ -39,6 +40,7 @@ import { l } from '@/utils/intl'; import { ProTable } from '@ant-design/pro-components'; import { ActionType, ProColumns } from '@ant-design/pro-table'; import React, { useRef, useState } from 'react'; +import TemplateModal from '../TemplateModal'; import UDFTemplateDrawer from '../UDFTemplateDrawer'; const CodeShowProps: any = { diff --git a/dinky-web/src/pages/RegCenter/UDF/components/UDFTemplate/UDFTemplateDrawer/UDFTemplateDesc/index.tsx b/dinky-web/src/pages/RegCenter/UDF/components/UDFTemplate/UDFTemplateDrawer/UDFTemplateDesc/index.tsx new file mode 100644 index 0000000000..4be59b8e40 --- /dev/null +++ b/dinky-web/src/pages/RegCenter/UDF/components/UDFTemplate/UDFTemplateDrawer/UDFTemplateDesc/index.tsx @@ -0,0 +1,46 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import { GlobalVar, UDFTemplate } from '@/types/RegCenter/data'; +import { ProDescriptions } from '@ant-design/pro-components'; +import React from 'react'; + +type UDFTemplateDescProps = { + values: Partial; + columns: any; +}; +const UDFTemplateDesc: React.FC = (props) => { + const { values, columns } = props; + return ( + <> + + column={1} + loading={values && Object.keys(values).length === 0} + title={values.name} + request={async () => ({ + data: values + })} + params={{ id: values.id }} + columns={columns} + /> + + ); +}; + +export default UDFTemplateDesc; diff --git a/dinky-web/src/pages/RegCenter/UDF/components/UDFTemplate/UDFTemplateDrawer/index.tsx b/dinky-web/src/pages/RegCenter/UDF/components/UDFTemplate/UDFTemplateDrawer/index.tsx new file mode 100644 index 0000000000..d504d038e1 --- /dev/null +++ b/dinky-web/src/pages/RegCenter/UDF/components/UDFTemplate/UDFTemplateDrawer/index.tsx @@ -0,0 +1,43 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
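In the new UDFTemplateDesc above, the opening element tag was likewise lost during flattening; the orphaned props (column, loading, title, request, params, columns) belong to ProDescriptions from @ant-design/pro-components. A hedged restatement of that read-only detail view, leaving out any generic type parameters the original may carry:

```tsx
// Sketch of the template detail view added above; the ProDescriptions tag is restored here
// and the prop values are the ones visible in the hunk.
import { ProDescriptions } from '@ant-design/pro-components';
import { UDFTemplate } from '@/types/RegCenter/data';
import React from 'react';

type TemplateDescProps = { values: Partial<UDFTemplate>; columns: any };

const TemplateDesc: React.FC<TemplateDescProps> = ({ values, columns }) => (
  <ProDescriptions
    column={1}
    loading={values && Object.keys(values).length === 0}
    title={values.name}
    request={async () => ({ data: values })}
    params={{ id: values.id }}
    columns={columns}
  />
);

export default TemplateDesc;
```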
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import { UDFTemplate } from '@/types/RegCenter/data'; +import { Drawer } from 'antd'; +import React from 'react'; +import UDFTemplateDesc from "@/pages/RegCenter/UDF/components/UDFTemplate/UDFTemplateDrawer/UDFTemplateDesc"; + +type UDFTemplateDrawerProps = { + onCancel: (flag?: boolean) => void; + values: Partial; + modalVisible: boolean; + columns: any; +}; +const UDFTemplateDrawer: React.FC = (props) => { + const { onCancel: handleCancel, values, modalVisible, columns } = props; + + return ( + <> + handleCancel(false)}> + + + + ); +}; + +export default UDFTemplateDrawer; diff --git a/dinky-web/src/pages/RegCenter/UDF/components/UDFTemplateDrawer/UDFTemplateDesc/index.tsx b/dinky-web/src/pages/RegCenter/UDF/components/UDFTemplateDrawer/UDFTemplateDesc/index.tsx deleted file mode 100644 index cb4510d68c..0000000000 --- a/dinky-web/src/pages/RegCenter/UDF/components/UDFTemplateDrawer/UDFTemplateDesc/index.tsx +++ /dev/null @@ -1,46 +0,0 @@ -/* - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -import { GlobalVar, UDFTemplate } from '@/types/RegCenter/data'; -import { ProDescriptions } from '@ant-design/pro-components'; -import React from 'react'; - -type UDFTemplateDescProps = { - values: Partial; - columns: any; -}; -const UDFTemplateDesc: React.FC = (props) => { - const { values, columns } = props; - return ( - <> - - column={1} - loading={values && Object.keys(values).length === 0} - title={values.name} - request={async () => ({ - data: values - })} - params={{ id: values.id }} - columns={columns} - /> - - ); -}; - -export default UDFTemplateDesc; diff --git a/dinky-web/src/pages/RegCenter/UDF/components/UDFTemplateDrawer/index.tsx b/dinky-web/src/pages/RegCenter/UDF/components/UDFTemplateDrawer/index.tsx deleted file mode 100644 index 6dea9c4575..0000000000 --- a/dinky-web/src/pages/RegCenter/UDF/components/UDFTemplateDrawer/index.tsx +++ /dev/null @@ -1,43 +0,0 @@ -/* - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -import { UDFTemplate } from '@/types/RegCenter/data'; -import { Drawer } from 'antd'; -import React from 'react'; -import UDFTemplateDesc from './UDFTemplateDesc'; - -type UDFTemplateDrawerProps = { - onCancel: (flag?: boolean) => void; - values: Partial; - modalVisible: boolean; - columns: any; -}; -const UDFTemplateDrawer: React.FC = (props) => { - const { onCancel: handleCancel, values, modalVisible, columns } = props; - - return ( - <> - handleCancel(false)}> - - - - ); -}; - -export default UDFTemplateDrawer; diff --git a/dinky-web/src/pages/RegCenter/UDF/index.tsx b/dinky-web/src/pages/RegCenter/UDF/index.tsx index 0f868e1054..5cc3a6ee93 100644 --- a/dinky-web/src/pages/RegCenter/UDF/index.tsx +++ b/dinky-web/src/pages/RegCenter/UDF/index.tsx @@ -1,30 +1,53 @@ /* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
*/ import SlowlyAppear from '@/components/Animation/SlowlyAppear'; -import TemplateTable from '@/pages/RegCenter/UDF/components/TemplateTable'; -import { PageContainer } from '@ant-design/pro-components'; +import TemplateTable from '@/pages/RegCenter/UDF/components/UDFTemplate/TemplateTable'; +import {PageContainer} from '@ant-design/pro-components'; +import UDFRegister from "@/pages/RegCenter/UDF/components/UDFRegister"; +import * as React from "react"; export default () => { return ( - - - + , + }, + { + tab: 'UDF 模版', + key: 'udf-template', + children: , + }, + ]} + title={false} + /> ); }; diff --git a/dinky-web/src/services/endpoints.tsx b/dinky-web/src/services/endpoints.tsx index ee89ac0a6c..1003e246b6 100644 --- a/dinky-web/src/services/endpoints.tsx +++ b/dinky-web/src/services/endpoints.tsx @@ -1,25 +1,22 @@ /* * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. * */ -/** - * the interface api constants - */ export enum API_CONSTANTS { // --- user --- // login path @@ -187,6 +184,12 @@ export enum API_CONSTANTS { // ---- get project build logs by id---- GIT_PROJECT_BUILD_STEP_LOGS = '/api/git/build-step-logs', + //UDF Manage + UDF_LIST = '/api/udf/list', + UDF_RESOURCES_LIST = '/api/udf/udfResourcesList', + UDF_ADD = '/api/udf/addOrUpdateByResourceId', + UDF_UPDATE = '/api/udf/update', + // UDF template UDF_TEMPLATE = '/api/udf/template/list', // UDF template add or update diff --git a/dinky-web/src/types/RegCenter/data.d.ts b/dinky-web/src/types/RegCenter/data.d.ts index cea4064cd4..1c0ad9f83b 100644 --- a/dinky-web/src/types/RegCenter/data.d.ts +++ b/dinky-web/src/types/RegCenter/data.d.ts @@ -1,18 +1,20 @@ /* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
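Two things land in the hunks just above: the RegCenter/UDF page switches from a single TemplateTable to a tabbed layout, and endpoints.tsx gains the four UDF-manage constants (UDF_LIST, UDF_RESOURCES_LIST, UDF_ADD, UDF_UPDATE). The tab JSX is hard to read here because its element tags were stripped during flattening; a hedged reconstruction based on the visible imports and tabList fragments follows. The first tab's label and key were lost, so they are guesses, and whether SlowlyAppear still wraps the page is an assumption.

```tsx
// Hedged reconstruction of the tabbed RegCenter/UDF page. Imports are the ones visible in
// the hunk above; the first tab's label/key and the SlowlyAppear wrapper are assumptions.
import * as React from 'react';
import { PageContainer } from '@ant-design/pro-components';
import SlowlyAppear from '@/components/Animation/SlowlyAppear';
import UDFRegister from '@/pages/RegCenter/UDF/components/UDFRegister';
import TemplateTable from '@/pages/RegCenter/UDF/components/UDFTemplate/TemplateTable';

export default () => (
  <SlowlyAppear>
    <PageContainer
      title={false}
      tabList={[
        // label/key of this first tab were swallowed by the flattening; 'UDF 注册' is a guess
        { tab: 'UDF 注册', key: 'udf-register', children: <UDFRegister /> },
        // this entry is visible verbatim in the hunk above
        { tab: 'UDF 模版', key: 'udf-template', children: <TemplateTable /> }
      ]}
    />
  </SlowlyAppear>
);
```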
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. */ import { BaseBeanColumns } from '@/types/Public/data'; @@ -221,6 +223,41 @@ export type UDFTemplate = BaseBeanColumns & { templateCode: string; }; + +export interface UDFRegisterInfo { + id: number; + resourcesId: number; + name: string; + className: string; + enable: boolean; + dialect: string; + source: string; + fileName: string; + // tenantId: number; + // createTime: string; + updateTime: Date; +} +export interface UDFRegisterInfoParent { + num: number; + resourcesId: number; + dialect: string; + source: string; + fileName: string; +} +export interface UDFRegisterInfoChild { + id: number; + resourcesId: number; + name: string; + className: string; + enable: boolean; + dialect: string; + source: string; + fileName: string; + // tenantId: number; + // createTime: string; + updateTime: Date; +} + export interface ResourceInfo { id: number; fileName: string; diff --git a/pom.xml b/pom.xml index 48cfa2a939..f3ab3bfc66 100644 --- a/pom.xml +++ b/pom.xml @@ -858,6 +858,7 @@ + apache-snapshots https://maven.aliyun.com/repository/apache-snapshots diff --git a/script/sql/dinky-mysql.sql b/script/sql/dinky-mysql.sql index 27e94e092c..fa7dd2f6ed 100644 --- a/script/sql/dinky-mysql.sql +++ b/script/sql/dinky-mysql.sql @@ -1921,3 +1921,15 @@ INSERT INTO dinky_alert_template VALUES (1, 'Default', ' COMMIT; SET FOREIGN_KEY_CHECKS = 1; +CREATE TABLE `dinky_udf_manage` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `name` varchar(50) DEFAULT NULL COMMENT 'udf name', + `class_name` varchar(50) DEFAULT NULL COMMENT 'Complete class name', + `task_id` int(11) DEFAULT NULL COMMENT 'task id', + `resources_id` int(11) DEFAULT NULL COMMENT 'resources id', + `enabled` tinyint(1) DEFAULT 1 COMMENT 'is enable', + `create_time` datetime DEFAULT NULL COMMENT 'create time', + `update_time` datetime DEFAULT NULL COMMENT 'update time', + PRIMARY KEY (`id`) USING BTREE, + KEY `name,resources_id` (`name`,`resources_id`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 ROW_FORMAT=DYNAMIC COMMENT='udf'; \ No newline at end of file diff --git a/script/sql/dinky-pg.sql b/script/sql/dinky-pg.sql index 68030833b8..a1e277e0c7 100644 --- 
a/script/sql/dinky-pg.sql +++ b/script/sql/dinky-pg.sql @@ -2739,3 +2739,40 @@ INSERT INTO public.dinky_alert_template VALUES (1, 'Default', ' ', 1, null, null); COMMIT; + +CREATE TABLE "public"."dinky_udf_manage" ( + "id" int4 NOT NULL, + "name" varchar(50) COLLATE "pg_catalog"."default", + "class_name" varchar(50) COLLATE "pg_catalog"."default", + "task_id" int4, + "resources_id" int4, + "enabled" int2, + "create_time" timestamp(6), + "update_time" timestamp(6), + CONSTRAINT "dinky_udf_manage_pkey" PRIMARY KEY ("id") +) +; + +ALTER TABLE "public"."dinky_udf_manage" + OWNER TO "postgres"; + +CREATE INDEX "name,resources_id" ON "public"."dinky_udf_manage" USING btree ( + "name" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST, + "resources_id" "pg_catalog"."int2_ops" ASC NULLS LAST + ); + +COMMENT ON COLUMN "public"."dinky_udf_manage"."name" IS 'udf name'; + +COMMENT ON COLUMN "public"."dinky_udf_manage"."class_name" IS 'Complete class name'; + +COMMENT ON COLUMN "public"."dinky_udf_manage"."task_id" IS 'task id'; + +COMMENT ON COLUMN "public"."dinky_udf_manage"."resources_id" IS 'resources id'; + +COMMENT ON COLUMN "public"."dinky_udf_manage"."enabled" IS 'is enable'; + +COMMENT ON COLUMN "public"."dinky_udf_manage"."create_time" IS 'create time'; + +COMMENT ON COLUMN "public"."dinky_udf_manage"."update_time" IS 'update time'; + +COMMENT ON TABLE "public"."dinky_udf_manage" IS 'udf'; \ No newline at end of file diff --git a/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_ddl.sql b/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_ddl.sql index 065b875f29..56f0c3987b 100644 --- a/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_ddl.sql +++ b/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_ddl.sql @@ -290,6 +290,18 @@ CREATE TABLE `dinky_sys_token` ( PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci COMMENT='token management'; +CREATE TABLE `dinky_udf_manage` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `name` varchar(50) DEFAULT NULL COMMENT 'udf name', + `class_name` varchar(50) DEFAULT NULL COMMENT 'Complete class name', + `task_id` int(11) DEFAULT NULL COMMENT 'task id', + `resources_id` int(11) DEFAULT NULL COMMENT 'resources id', + `enabled` tinyint(1) DEFAULT 1 COMMENT 'is enable', + `create_time` datetime DEFAULT NULL COMMENT 'create time', + `update_time` datetime DEFAULT NULL COMMENT 'update time', + PRIMARY KEY (`id`) USING BTREE, + KEY `name,resources_id` (`name`,`resources_id`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 ROW_FORMAT=DYNAMIC COMMENT='udf'; drop table if exists dinky_task_statement; From 869c68dc185ed06ee00e98a9040fd4ebec6848d3 Mon Sep 17 00:00:00 2001 From: aiwenmo <32723967+aiwenmo@users.noreply.github.com> Date: Sat, 4 Nov 2023 10:21:15 +0800 Subject: [PATCH 04/21] [Fix-1847][metadata] Fix postgreSql get DDL script error (#2492) Co-authored-by: wenmo <32723967+wenmo@users.noreply.github.com> --- .../src/main/java/org/dinky/metadata/convert/ITypeConvert.java | 1 + 1 file changed, 1 insertion(+) diff --git a/dinky-metadata/dinky-metadata-base/src/main/java/org/dinky/metadata/convert/ITypeConvert.java b/dinky-metadata/dinky-metadata-base/src/main/java/org/dinky/metadata/convert/ITypeConvert.java index a57c04d19b..8689dfdc42 100644 --- a/dinky-metadata/dinky-metadata-base/src/main/java/org/dinky/metadata/convert/ITypeConvert.java +++ b/dinky-metadata/dinky-metadata-base/src/main/java/org/dinky/metadata/convert/ITypeConvert.java @@ -76,6 +76,7 @@ 
default Object convertValue(ResultSet results, String columnName, String javaTyp case "blob": return results.getBlob(columnName); case "boolean": + case "bool": case "bit": return results.getBoolean(columnName); case "byte": From 439754d4316f792ec1eff0c9b974d3e0873ec323 Mon Sep 17 00:00:00 2001 From: aiwenmo <32723967+aiwenmo@users.noreply.github.com> Date: Sat, 4 Nov 2023 10:21:49 +0800 Subject: [PATCH 05/21] [Fix-1887] [admin] Fix DolphinScheduler can not generate task code in createTaskDefinition (#2493) Co-authored-by: wenmo <32723967+wenmo@users.noreply.github.com> --- .../main/java/org/dinky/controller/SchedulerController.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/dinky-admin/src/main/java/org/dinky/controller/SchedulerController.java b/dinky-admin/src/main/java/org/dinky/controller/SchedulerController.java index 212974d0c3..87387340a1 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/SchedulerController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/SchedulerController.java @@ -207,6 +207,9 @@ public Result createTaskDefinition( return Result.failed(Status.DS_WORK_FLOW_DEFINITION_TASK_NAME_EXIST, processName, taskName); } + Long taskCode = taskClient.genTaskCode(projectCode); + taskRequest.setCode(taskCode); + String taskDefinitionJsonObj = JSONUtil.toJsonStr(taskRequest); taskClient.createTaskDefinition(projectCode, process.getCode(), upstreamCodes, taskDefinitionJsonObj); From f38a06c9cb3d8d3016e38ea7ab6786e03c48f730 Mon Sep 17 00:00:00 2001 From: aiwenmo <32723967+aiwenmo@users.noreply.github.com> Date: Sat, 4 Nov 2023 10:22:49 +0800 Subject: [PATCH 06/21] [Fix-1849][client] Fix CDCSOURCE kafkaSink transactionalIdPrefix NullPointerException (#2490) Co-authored-by: wenmo <32723967+wenmo@users.noreply.github.com> --- .../org/dinky/cdc/SinkBuilderFactory.java | 2 + .../org/dinky/cdc/kafka/KafkaSinkBuilder.java | 193 ++++++++++++++++++ 2 files changed, 195 insertions(+) create mode 100644 dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/kafka/KafkaSinkBuilder.java diff --git a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/SinkBuilderFactory.java b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/SinkBuilderFactory.java index 0dbb97b82e..3aa19e3922 100644 --- a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/SinkBuilderFactory.java +++ b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/SinkBuilderFactory.java @@ -20,6 +20,7 @@ package org.dinky.cdc; import org.dinky.assertion.Asserts; +import org.dinky.cdc.kafka.KafkaSinkBuilder; import org.dinky.cdc.sql.SQLSinkBuilder; import org.dinky.cdc.sql.catalog.SQLCatalogSinkBuilder; import org.dinky.data.model.FlinkCDCConfig; @@ -59,6 +60,7 @@ private static Map> getPlusSinkBuilder() { Map> map = new HashMap<>(); map.put(SQLSinkBuilder.KEY_WORD, SQLSinkBuilder::new); map.put(SQLCatalogSinkBuilder.KEY_WORD, SQLCatalogSinkBuilder::new); + map.put(KafkaSinkBuilder.KEY_WORD, KafkaSinkBuilder::new); final ServiceLoader loader = ServiceLoader.load(SinkBuilder.class); diff --git a/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/kafka/KafkaSinkBuilder.java b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/kafka/KafkaSinkBuilder.java new file mode 100644 index 0000000000..0954c18cde --- /dev/null +++ b/dinky-cdc/dinky-cdc-core/src/main/java/org/dinky/cdc/kafka/KafkaSinkBuilder.java @@ -0,0 +1,193 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.cdc.kafka; + +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import org.dinky.assertion.Asserts; +import org.dinky.cdc.AbstractSinkBuilder; +import org.dinky.cdc.CDCBuilder; +import org.dinky.cdc.SinkBuilder; +import org.dinky.data.model.FlinkCDCConfig; +import org.dinky.data.model.Schema; +import org.dinky.data.model.Table; +import org.dinky.executor.CustomTableEnvironment; + +import org.apache.flink.api.common.serialization.SimpleStringSchema; +import org.apache.flink.connector.base.DeliveryGuarantee; +import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema; +import org.apache.flink.connector.kafka.sink.KafkaSink; +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.flink.streaming.api.datastream.DataStream; +import org.apache.flink.streaming.api.datastream.DataStreamSource; +import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.streaming.api.functions.ProcessFunction; +import org.apache.flink.table.data.RowData; +import org.apache.flink.table.types.logical.LogicalType; +import org.apache.flink.util.Collector; +import org.apache.flink.util.OutputTag; + +import java.io.Serializable; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +/** + * MysqlCDCBuilder + **/ +public class KafkaSinkBuilder extends AbstractSinkBuilder implements Serializable { + + public static final String KEY_WORD = "datastream-kafka"; + + public KafkaSinkBuilder() {} + + public KafkaSinkBuilder(FlinkCDCConfig config) { + super(config); + } + + @Override + public void addSink( + StreamExecutionEnvironment env, + DataStream rowDataDataStream, + Table table, + List columnNameList, + List columnTypeList) {} + + @Override + public String getHandle() { + return KEY_WORD; + } + + @Override + public SinkBuilder create(FlinkCDCConfig config) { + return new 
KafkaSinkBuilder(config); + } + + @Override + public DataStreamSource build( + CDCBuilder cdcBuilder, + StreamExecutionEnvironment env, + CustomTableEnvironment customTableEnvironment, + DataStreamSource dataStreamSource) { + Properties kafkaProducerConfig = getProperties(); + if (Asserts.isNotNullString(config.getSink().get("topic"))) { + org.apache.flink.connector.kafka.sink.KafkaSinkBuilder kafkaSinkBuilder = + KafkaSink.builder() + .setBootstrapServers(config.getSink().get("brokers")) + .setRecordSerializer(KafkaRecordSerializationSchema.builder() + .setTopic(config.getSink().get("topic")) + .setValueSerializationSchema(new SimpleStringSchema()) + .build()) + .setDeliverGuarantee(DeliveryGuarantee.valueOf( + env.getCheckpointingMode().name())); + if (!kafkaProducerConfig.isEmpty()) { + kafkaSinkBuilder.setKafkaProducerConfig(kafkaProducerConfig); + } + if (!kafkaProducerConfig.isEmpty() + && kafkaProducerConfig.containsKey("transactional.id") + && Asserts.isNotNullString(kafkaProducerConfig.getProperty("transactional.id"))) { + kafkaSinkBuilder.setTransactionalIdPrefix(kafkaProducerConfig.getProperty("transactional.id")); + } + KafkaSink kafkaSink = kafkaSinkBuilder.build(); + dataStreamSource.sinkTo(kafkaSink); + } else { + Map> tagMap = new HashMap<>(); + Map tableMap = new HashMap<>(); + ObjectMapper objectMapper = new ObjectMapper(); + SingleOutputStreamOperator mapOperator = dataStreamSource + .map(x -> objectMapper.readValue(x, Map.class)) + .returns(Map.class); + final List schemaList = config.getSchemaList(); + final String schemaFieldName = config.getSchemaFieldName(); + if (Asserts.isNotNullCollection(schemaList)) { + for (Schema schema : schemaList) { + for (Table table : schema.getTables()) { + String sinkTableName = getSinkTableName(table); + OutputTag outputTag = new OutputTag(sinkTableName) {}; + tagMap.put(table, outputTag); + tableMap.put(table.getSchemaTableName(), table); + } + } + SingleOutputStreamOperator process = mapOperator.process(new ProcessFunction() { + + @Override + public void processElement(Map map, ProcessFunction.Context ctx, Collector out) + throws Exception { + LinkedHashMap source = (LinkedHashMap) map.get("source"); + try { + String result = objectMapper.writeValueAsString(map); + Table table = + tableMap.get(source.get(schemaFieldName).toString() + "." 
+ + source.get("table").toString()); + OutputTag outputTag = tagMap.get(table); + ctx.output(outputTag, result); + } catch (Exception e) { + out.collect(objectMapper.writeValueAsString(map)); + } + } + }); + tagMap.forEach((k, v) -> { + String topic = getSinkTableName(k); + org.apache.flink.connector.kafka.sink.KafkaSinkBuilder kafkaSinkBuilder = + KafkaSink.builder() + .setBootstrapServers(config.getSink().get("brokers")) + .setRecordSerializer(KafkaRecordSerializationSchema.builder() + .setTopic(topic) + .setValueSerializationSchema(new SimpleStringSchema()) + .build()) + .setDeliverGuarantee(DeliveryGuarantee.valueOf( + env.getCheckpointingMode().name())); + if (!kafkaProducerConfig.isEmpty()) { + kafkaSinkBuilder.setKafkaProducerConfig(kafkaProducerConfig); + } + if (!kafkaProducerConfig.isEmpty() + && kafkaProducerConfig.containsKey("transactional.id") + && Asserts.isNotNullString(kafkaProducerConfig.getProperty("transactional.id"))) { + kafkaSinkBuilder.setTransactionalIdPrefix( + kafkaProducerConfig.getProperty("transactional.id") + "-" + topic); + } + KafkaSink kafkaSink = kafkaSinkBuilder.build(); + process.getSideOutput(v).rebalance().sinkTo(kafkaSink).name(topic); + }); + } + } + return dataStreamSource; + } +} From 2551fd76e6c0fbb995aaf010fb9dc8863a7d45ec Mon Sep 17 00:00:00 2001 From: zhu-mingye <934230207@qq.com> Date: Fri, 3 Nov 2023 21:58:32 -0500 Subject: [PATCH 07/21] support filter empty dir (#2491) * Spotless Apply * Spotless Apply * support filter empty dir * Spotless Apply * support filter empty dir * Spotless Apply * fix --------- Co-authored-by: zhu-mingye --- .../src/components/Icons/CodeLanguageIcon.tsx | 150 ++++++- .../src/components/TableTransfer/index.tsx | 3 +- .../Transfer/TreeTransfer/index.tsx | 122 +++--- dinky-web/src/locales/en-US/pages.ts | 11 +- dinky-web/src/locales/zh-CN/pages.ts | 12 +- .../MiddleContainer/Editor/constants.tsx | 2 +- .../Resource/components/FileTree/function.tsx | 105 +++-- .../UDFRegister/UDFRegisterModal/index.tsx | 62 +++ .../UDF/components/UDFRegister/index.tsx | 405 +++++++++--------- .../UDF/components/UDFRegister/service.tsx | 18 +- .../UDFTemplate/TemplateTable/index.tsx | 2 +- .../UDFTemplate/UDFTemplateDrawer/index.tsx | 2 +- dinky-web/src/pages/RegCenter/UDF/index.tsx | 40 +- dinky-web/src/services/constants.tsx | 31 +- dinky-web/src/types/RegCenter/data.d.ts | 1 - dinky-web/src/utils/function.tsx | 39 +- 16 files changed, 644 insertions(+), 361 deletions(-) create mode 100644 dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/UDFRegisterModal/index.tsx diff --git a/dinky-web/src/components/Icons/CodeLanguageIcon.tsx b/dinky-web/src/components/Icons/CodeLanguageIcon.tsx index f0465d8186..e7fa235034 100644 --- a/dinky-web/src/components/Icons/CodeLanguageIcon.tsx +++ b/dinky-web/src/components/Icons/CodeLanguageIcon.tsx @@ -1,18 +1,20 @@ /* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
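The commit above ("support filter empty dir") reworks how the resource tree is offered to the UDF register page: directories that contain no usable files should disappear from the transfer's left-hand tree. The actual implementation arrives in the FileTree function.tsx hunk further below (isDirectoryEmpty / filterEmpty); as a compact preview of the idea, a recursive emptiness check over the same node shape might look like this (node type reduced to the two fields the check needs):

```tsx
// Preview sketch of the empty-directory check introduced by this commit; the real
// implementation is the isDirectoryEmpty/filterEmpty pair in FileTree/function.tsx below.
interface ResourceNodeSlim {
  isDirectory: boolean;
  children?: ResourceNodeSlim[];
}

// A directory is "empty" when, however deep we descend, we never reach a file node.
export const isEmptyDir = (node: ResourceNodeSlim): boolean => {
  if (!node.isDirectory) {
    return false; // a file makes the subtree non-empty
  }
  return (node.children ?? []).every(isEmptyDir);
};
```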
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. */ import Icon from '@ant-design/icons'; @@ -140,17 +142,131 @@ export const FolderSvgExpand = () => { component={() => ( + + + + + + + + + + )} + /> + + ); +}; + +export const ZipSvg = () => { + return ( + ( + + + + + + + + )} + /> + ); +}; + +export const ConfigSvg = () => { + return ( + ( + + + + + )} + /> + ); +}; +export const JarSvg = () => { + return ( + <> + ( + + + )} diff --git a/dinky-web/src/components/TableTransfer/index.tsx b/dinky-web/src/components/TableTransfer/index.tsx index edd561bf25..13e8b5f6f0 100644 --- a/dinky-web/src/components/TableTransfer/index.tsx +++ b/dinky-web/src/components/TableTransfer/index.tsx @@ -1,4 +1,5 @@ /* + * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. 
@@ -16,7 +17,7 @@ * */ -import { UserBaseInfo } from '@/types/User/data'; +import { UserBaseInfo } from '@/types/AuthCenter/data.d'; import { ProColumns, ProTable } from '@ant-design/pro-components'; import { Transfer } from 'antd'; import { TableRowSelection } from 'antd/es/table/interface'; diff --git a/dinky-web/src/components/Transfer/TreeTransfer/index.tsx b/dinky-web/src/components/Transfer/TreeTransfer/index.tsx index 4196263ccd..ce48cdb586 100644 --- a/dinky-web/src/components/Transfer/TreeTransfer/index.tsx +++ b/dinky-web/src/components/Transfer/TreeTransfer/index.tsx @@ -17,74 +17,74 @@ * */ -import React, {Key} from 'react'; -import {theme, Transfer, Tree} from 'antd'; -import type {TransferDirection, TransferItem} from 'antd/es/transfer'; -import type {DataNode} from 'antd/es/tree'; +import { Transfer, Tree } from 'antd'; +import type { TransferDirection, TransferItem } from 'antd/es/transfer'; +import type { DataNode } from 'antd/es/tree'; +import React, { Key } from 'react'; +const { DirectoryTree } = Tree; interface TreeTransferProps { - dataSource: DataNode[]; - targetKeys: Key[]; - onChange?: (targetKeys: Key[], direction: TransferDirection, moveKeys: string[]) => void; - height?: number; + dataSource: DataNode[]; + targetKeys: Key[]; + onChange?: (targetKeys: Key[], direction: TransferDirection, moveKeys: string[]) => void; } const isChecked = (selectedKeys: React.Key[], eventKey: React.Key) => - selectedKeys.includes(eventKey); + selectedKeys.includes(eventKey); const generateTree = (treeNodes: DataNode[] = [], checkedKeys: Key[] = []): DataNode[] => - treeNodes.map(({children, ...props}) => ({ - ...props, - disabled: checkedKeys.includes(props.key as string), - children: generateTree(children, checkedKeys), - })); -export const TreeTransfer: React.FC = ({dataSource, targetKeys,height, ...restProps}) => { - const {token} = theme.useToken(); + treeNodes.map(({ children, ...props }) => ({ + ...props, + disabled: checkedKeys.includes(props.key as string), + children: generateTree(children, checkedKeys) + })); +export const TreeTransfer: React.FC = ({ + dataSource, + targetKeys, + ...restProps +}) => { + const transferDataSource: TransferItem[] = []; - const transferDataSource: TransferItem[] = []; + function flatten(list: DataNode[] = []) { + list.forEach((item) => { + transferDataSource.push(item as TransferItem); + flatten(item.children); + }); + } - function flatten(list: DataNode[] = []) { - list.forEach((item) => { - transferDataSource.push(item as TransferItem); - flatten(item.children); - }); - } + flatten(dataSource); - flatten(dataSource); - - return ( - item.path} - showSelectAll={false} - > - {({direction, onItemSelect, selectedKeys}) => { - if (direction === 'left') { - const checkedKeys = [...selectedKeys, ...targetKeys]; - return ( -
                      - { - onItemSelect(key as string, !isChecked(checkedKeys, key)); - }} - onSelect={(_, {node: {key}}) => { - onItemSelect(key as string, !isChecked(checkedKeys, key)); - }} - /> -
                      - ); - } - }} -
                      - ); + return ( + record.id as string} + targetKeys={targetKeys as string[]} + dataSource={transferDataSource} + className={'treeList'} + render={(item) => item.path} + showSelectAll={true} + > + {({ direction, onItemSelect, selectedKeys }) => { + if (direction === 'left') { + const checkedKeys = [...selectedKeys, ...targetKeys]; + return ( + { + onItemSelect(key as string, !isChecked(checkedKeys, key)); + }} + onSelect={(_, { node: { key } }) => { + onItemSelect(key as string, !isChecked(checkedKeys, key)); + }} + /> + ); + } + }} + + ); }; diff --git a/dinky-web/src/locales/en-US/pages.ts b/dinky-web/src/locales/en-US/pages.ts index 1e8d76728f..8528d0b6b8 100644 --- a/dinky-web/src/locales/en-US/pages.ts +++ b/dinky-web/src/locales/en-US/pages.ts @@ -840,7 +840,16 @@ export default { 'rc.template.templateCode': 'Template Code', 'rc.template.templateCodeLabel': 'Template Code( {language} )', 'rc.template.templateCodePlaceholder': 'Please edit the template code! ', - 'rc.udf.management': 'UDF Template Management', + 'rc.udf.template.management': 'UDF Template Management', + 'rc.udf.register.management': 'UDF Register Management', + 'rc.udf.register.file.name': 'File Name', + 'rc.udf.register.parse.count': 'UDF Parse Count', + 'rc.udf.register.source': 'Source', + 'rc.udf.register.language': 'Language', + 'rc.udf.register.name': 'UDF Name', + 'rc.udf.register.className': 'Class Name', + 'rc.udf.register.deleteConfirm': 'Are you sure you want to delete this UDF? ', + 'rc.udf.register': 'Register UDF', /** * * role diff --git a/dinky-web/src/locales/zh-CN/pages.ts b/dinky-web/src/locales/zh-CN/pages.ts index ff3b9e1e78..3c33e9ed72 100644 --- a/dinky-web/src/locales/zh-CN/pages.ts +++ b/dinky-web/src/locales/zh-CN/pages.ts @@ -802,7 +802,17 @@ export default { 'rc.template.templateCode': '模板代码', 'rc.template.templateCodeLabel': '模板代码({language})', 'rc.template.templateCodePlaceholder': '请编辑模板代码!', - 'rc.udf.management': 'UDF 模板管理', + 'rc.udf.template.management': 'UDF 模板管理', + 'rc.udf.register.management': 'UDF 注册管理', + 'rc.udf.register.file.name': '文件名称', + 'rc.udf.register.parse.count': 'UDF 解析数量', + 'rc.udf.register.source': '来源', + 'rc.udf.register.language': '语言', + 'rc.udf.register.name': '名称', + 'rc.udf.register.className': '类名', + 'rc.udf.register.deleteConfirm': '确定删除该 UDF 吗?', + 'rc.udf.register': '注册 UDF', + /** * * role diff --git a/dinky-web/src/pages/DataStudio/MiddleContainer/Editor/constants.tsx b/dinky-web/src/pages/DataStudio/MiddleContainer/Editor/constants.tsx index 3043d3a2ed..35d7323bdb 100644 --- a/dinky-web/src/pages/DataStudio/MiddleContainer/Editor/constants.tsx +++ b/dinky-web/src/pages/DataStudio/MiddleContainer/Editor/constants.tsx @@ -57,5 +57,5 @@ export const TASK_VAR_FILTER = [ 'clusterConfigurationName', 'databaseName', 'envName', - 'alertGroupName', + 'alertGroupName' ]; diff --git a/dinky-web/src/pages/RegCenter/Resource/components/FileTree/function.tsx b/dinky-web/src/pages/RegCenter/Resource/components/FileTree/function.tsx index a8e4e0c563..86ec0eb510 100644 --- a/dinky-web/src/pages/RegCenter/Resource/components/FileTree/function.tsx +++ b/dinky-web/src/pages/RegCenter/Resource/components/FileTree/function.tsx @@ -1,22 +1,23 @@ /* * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
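Alongside the DirectoryTree-based TreeTransfer above, this commit adds rc.udf.register.* message keys in both locales; they are presumably what replaces the hard-coded Chinese column titles from the earlier UDFRegister commit. A sketch of that wiring (the column list itself is illustrative, not copied from the patch):

```tsx
// Illustrative parent-table columns using the new locale keys added above;
// field names match the UDFRegisterInfoParent interface from this patch.
import { ProColumns } from '@ant-design/pro-components';
import { UDFRegisterInfoParent } from '@/types/RegCenter/data';
import { l } from '@/utils/intl';

export const parentColumns: ProColumns<UDFRegisterInfoParent>[] = [
  { title: l('rc.udf.register.file.name'), dataIndex: 'fileName', width: 120 },
  { title: l('rc.udf.register.parse.count'), dataIndex: 'num', width: 120 },
  { title: l('rc.udf.register.source'), dataIndex: 'source', width: 120 },
  { title: l('rc.udf.register.language'), dataIndex: 'dialect', width: 120 }
];
```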
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. * */ +import { TagAlignCenter } from '@/components/StyledComponents'; import { ResourceInfo } from '@/types/RegCenter/data'; import { parseByteStr, renderIcon } from '@/utils/function'; import { l } from '@/utils/intl'; @@ -35,19 +36,67 @@ const buildTitleLabel = (item: ResourceInfo) => { ); }; -export const buildResourceTreeData = (data: ResourceInfo[]): any => - data.map((item: ResourceInfo) => { - return { - isLeaf: !item.isDirectory, - name: item.fileName, - parentId: item.pid, - label: item.fullName + '/' + item.fileName, - icon: renderIcon(item.fileName, '.', item.isDirectory), - path: item.fullName, - title: buildTitleLabel(item), - fullInfo: item, - key: item.id, - id: item.id, - children: item.children && buildResourceTreeData(item.children) - }; - }); +/** + * 判断目录是否为空 + * @param directory + */ +function isDirectoryEmpty(directory: ResourceInfo): boolean { + if (!directory.children) { + return true; + } + for (let child of directory.children) { + if (child.isDirectory) { + if (!isDirectoryEmpty(child)) { + return false; + } + } else { + return false; + } + } + return true; +} + +function filterEmpty( + isFilterEmptyChildren: boolean, + item: ResourceInfo, + filterSuffixList: string[] +) { + if (isFilterEmptyChildren) { + if (item.isDirectory) { + // 如果是目录,则递归遍历看最深处是否是空目录,是的话过滤掉 + return !isFilterEmptyChildren || !isDirectoryEmpty(item); + } else { + // 如果是文件,则判断该文件末尾后缀是不是 .jar 文件,是的话留着,不是的话仍然过滤 + // 获取文件后缀 , 如果没有后缀则返回空 false + const suffix = item.fileName.split('.').reverse().pop(); + if (suffix) { + // 如果有后缀,则判断是否在过滤列表中 + return !filterSuffixList.includes(suffix); + } + } + } + return true; +} + +export const buildResourceTreeData = ( + data: ResourceInfo[], + isFilterEmptyChildren = false, + filterSuffixList: string[] = [] +): any => + data + .filter((item: ResourceInfo) => filterEmpty(isFilterEmptyChildren, item, filterSuffixList)) + .map((item: ResourceInfo) => { + return { + isLeaf: !item.isDirectory, + name: item.fileName, + parentId: item.pid, + label: item.fullName + '/' + item.fileName, + icon: {renderIcon(item.fileName, '.', item.isDirectory)}, + path: item.fullName, + title: buildTitleLabel(item), + fullInfo: item, + 
key: item.id, + id: item.id, + children: item.children && buildResourceTreeData(item.children, isFilterEmptyChildren) + }; + }); diff --git a/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/UDFRegisterModal/index.tsx b/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/UDFRegisterModal/index.tsx new file mode 100644 index 0000000000..a87f1fd8bd --- /dev/null +++ b/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/UDFRegisterModal/index.tsx @@ -0,0 +1,62 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import { TreeTransfer } from '@/components/Transfer/TreeTransfer'; +import { buildResourceTreeData } from '@/pages/RegCenter/Resource/components/FileTree/function'; +import { l } from '@/utils/intl'; +import { Modal } from 'antd'; +import { TransferDirection } from 'antd/es/transfer'; +import React, { Key, memo } from 'react'; + +type UDFRegisterModalProps = { + showEdit: boolean; + openChange: (showEdit: boolean) => void; + onOk: () => void; + targetKeys: Key[]; + targetKeyChange: (targetKeys: Key[], direction: TransferDirection, moveKeys: string[]) => void; + treeData: any[]; +}; + +const UDFRegisterModal: React.FC = (props) => { + const { showEdit, openChange, treeData, targetKeys, targetKeyChange, onOk } = props; + + return ( + <> + openChange(false)} + onOk={() => onOk()} + > + + + + ); +}; + +export default memo(UDFRegisterModal); diff --git a/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/index.tsx b/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/index.tsx index 7f675d944f..e32b6c7e00 100644 --- a/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/index.tsx +++ b/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/index.tsx @@ -17,215 +17,218 @@ * */ -import {ProColumns} from "@ant-design/pro-components"; -import {UDFRegisterInfo, UDFRegisterInfoParent} from "@/types/RegCenter/data"; -import {EditBtn} from "@/components/CallBackButton/EditBtn"; -import {PopconfirmDeleteBtn} from "@/components/CallBackButton/PopconfirmDeleteBtn"; -import {Key, useEffect, useRef, useState} from "react"; -import ProTable, {ActionType} from "@ant-design/pro-table"; -import {useRequest} from "@@/plugin-request"; -import {API_CONSTANTS} from "@/services/endpoints"; -import {Button, Modal} from "antd"; -import {PlusOutlined} from "@ant-design/icons"; -import {TreeTransfer} from "@/components/Transfer/TreeTransfer"; -import {buildResourceTreeData} from "@/pages/RegCenter/Resource/components/FileTree/function"; -import {add, update} from "./service"; -import {l} from "@/utils/intl"; +import { EditBtn } from '@/components/CallBackButton/EditBtn'; +import { PopconfirmDeleteBtn } from '@/components/CallBackButton/PopconfirmDeleteBtn'; +import UDFRegisterModal from 
'@/pages/RegCenter/UDF/components/UDFRegister/UDFRegisterModal'; +import { API_CONSTANTS } from '@/services/endpoints'; +import { UDFRegisterInfo, UDFRegisterInfoParent } from '@/types/RegCenter/data'; +import { l } from '@/utils/intl'; +import { useRequest } from '@@/plugin-request'; +import { ProColumns } from '@ant-design/pro-components'; +import ProTable, { ActionType } from '@ant-design/pro-table'; +import React, { Key, useEffect, useRef, useState } from 'react'; +import { add, update } from './service'; +type UDFRegisterProps = { + showEdit: boolean; + showEditChange: (showEdit: boolean) => void; +}; -const UDFRegister = () => { - const req = useRequest<{ - data: UDFRegisterInfo[] - }>({ - url: API_CONSTANTS.UDF_LIST - }); - const req_resources = useRequest({ - url: API_CONSTANTS.UDF_RESOURCES_LIST - }); - const actionRef = useRef(); - const [udfRegisterState, setUDFRegisterState] = useState<{ - editableKeys: Key[]; - isEdit: boolean; - isAdd: boolean; - dataSource: UDFRegisterInfo[]; - }>({ - editableKeys: [], - dataSource: [], - isEdit: false, - isAdd: false, - }); - const [showEdit, setShowEdit] = useState(false); - const [targetKeys, setTargetKeys] = useState([]); - useEffect(() => { - setTargetKeys([...new Set(req.data?.map(x => x.resourcesId))]) - }, [req.data]) - const editableKeysChange = (editableKeys: Key[]) => { - setUDFRegisterState(prevState => ({...prevState, editableKeys})); - } - - const groupData: { - [p: string]: UDFRegisterInfo[] | undefined - } = Object.fromEntries( - Array.from(new Set(req.data?.map(({fileName}) => fileName))).map((type) => [ - type, - req.data?.filter((item) => item.fileName === type), - ]), - ) - const parentData: UDFRegisterInfoParent[] = Object.keys(groupData)?.map((key) => { - const d = groupData[key] ?? 
[]; - return { - resourcesId: d[0].resourcesId, - dialect: d[0].dialect, - source: d[0].source, - fileName: key, - num: d.length, - } - }) +const UDFRegister: React.FC = (props) => { + const { showEdit, showEditChange } = props; + const udfRegisterInfoRequest = useRequest<{ + data: UDFRegisterInfo[]; + }>({ + url: API_CONSTANTS.UDF_LIST + }); - const columnsParent: ProColumns[] = [ - { - title: '文件名称', - width: 120, - dataIndex: 'fileName', - }, { - title: 'udf解析数量', - width: 120, - dataIndex: 'num', - }, { - title: '来源', - width: 120, - dataIndex: 'source', - valueEnum: { - resources: {text: "resources"}, - develop: {text: 'develop'}, - } - }, { - title: '语言', - width: 120, - dataIndex: 'dialect', - valueEnum: { - java: {text: "Java"}, - python: {text: 'Python'}, - } - } - ] - const expandedRowRender = (expandedRow: UDFRegisterInfoParent) => { + const resourceInfoRequest = useRequest({ + url: API_CONSTANTS.UDF_RESOURCES_LIST + }); + const actionRef = useRef(); + const [udfRegisterState, setUDFRegisterState] = useState<{ + editableKeys: Key[]; + isEdit: boolean; + isAdd: boolean; + dataSource: UDFRegisterInfo[]; + }>({ + editableKeys: [], + dataSource: [], + isEdit: false, + isAdd: false + }); - const columns: ProColumns[] = [ - { - title: '名称', - dataIndex: 'name', - width: '10%', - formItemProps: { - rules: [ - { - required: true, - message: '请输入名称', - }, - ], - }, - }, - { - title: '类名', - dataIndex: 'className', - readonly: true, - width: '15%', - }, - { - title: '更新时间', - dataIndex: 'updateTime', - readonly: true, - valueType: 'dateTime', - width: '15%', - }, - { - title: '操作', - valueType: 'option', - width: '10%', - render: (_text, record, _, action) => { - return [ - { - action?.startEditable?.(record.id); - setUDFRegisterState(prevState => ({...prevState, isEdit: true, isAdd: false})); - }}/> - , record.source == "develop" ? - { - }} description={"确定删除吗???"}/> - : <> - ] - }, - } - ] + const [targetKeys, setTargetKeys] = useState([]); + useEffect(() => { + setTargetKeys([...new Set(udfRegisterInfoRequest.data?.map((x) => x.resourcesId))]); + }, [udfRegisterInfoRequest.data]); + const editableKeysChange = (editableKeys: Key[]) => { + setUDFRegisterState((prevState) => ({ ...prevState, editableKeys })); + }; - return ( - { - await update(row.id, row.name); - await req.refresh(); - actionRef.current?.reload(); - }, - actionRender: (_, _2, defaultDom) => [ - defaultDom.save, - defaultDom.cancel - ] - }} - /> - ); + const groupData: { + [p: string]: UDFRegisterInfo[] | undefined; + } = Object.fromEntries( + Array.from(new Set(udfRegisterInfoRequest.data?.map(({ fileName }) => fileName))).map( + (type) => [type, udfRegisterInfoRequest.data?.filter((item) => item.fileName === type)] + ) + ); + const parentData: UDFRegisterInfoParent[] = Object.keys(groupData)?.map((key) => { + const d = groupData[key] ?? 
[]; + return { + resourcesId: d[0].resourcesId, + dialect: d[0].dialect, + source: d[0].source, + fileName: key, + num: d.length }; + }); + const columnsParent: ProColumns[] = [ + { + title: l('rc.udf.register.file.name'), + width: '25%', + dataIndex: 'fileName' + }, + { + title: l('rc.udf.register.parse.count'), + width: '25%', + sorter: true, + dataIndex: 'num' + }, + { + title: l('rc.udf.register.source'), + width: '25%', + dataIndex: 'source', + valueEnum: { + resources: { text: 'resources' }, + develop: { text: 'develop' } + } + }, + { + title: l('rc.udf.register.language'), + width: '25%', + dataIndex: 'dialect', + valueEnum: { + java: { text: 'Java' }, + python: { text: 'Python' } + } + } + ]; + const expandedRowRender = (expandedRow: UDFRegisterInfoParent) => { + const columns: ProColumns[] = [ + { + title: l('rc.udf.register.name'), + dataIndex: 'name', + width: '10%' + }, + { + title: l('rc.udf.register.className'), + dataIndex: 'className', + readonly: true, + width: '15%' + }, + { + title: l('global.table.updateTime'), + dataIndex: 'updateTime', + readonly: true, + valueType: 'dateTime', + width: '15%' + }, + { + title: l('global.table.operate'), + valueType: 'option', + width: '10%', + render: (_text, record, _, action) => { + return [ + { + action?.startEditable?.(record.id); + setUDFRegisterState((prevState) => ({ ...prevState, isEdit: true, isAdd: false })); + }} + />, + record.source == 'develop' ? ( + {}} + description={l('rc.udf.register.deleteConfirm')} + /> + ) : ( + <> + ) + ]; + } + } + ]; + + const handleOnSave = async (row: UDFRegisterInfo) => { + await update(row.id, row.name); + await udfRegisterInfoRequest.refresh(); + actionRef.current?.reload(); + }; return ( - <> - - dataSource={parentData} - columns={columnsParent} - rowKey="resourcesId" - pagination={{ - showQuickJumper: true, - }} - expandable={{expandedRowRender}} - search={false} - dateFormatter="string" - options={false} - toolBarRender={() => [ - - ]} - /> - setShowEdit(false)} - onOk={() => { - add(targetKeys).then(() => { - req.refresh().then(() => { - setShowEdit(false) - }) - }) - }}> - - - - ) -} + handleOnSave(row), + actionRender: (_, _2, defaultDom) => [defaultDom.save, defaultDom.cancel] + }} + /> + ); + }; + + /** + * submit register udf + */ + const handleUdfRefresh = () => { + add(targetKeys).then(() => { + udfRegisterInfoRequest.refresh().then(() => { + showEditChange(false); + }); + }); + }; + + return ( + <> + + dataSource={parentData} + columns={columnsParent} + rowKey='resourcesId' + size={'small'} + pagination={{ + hideOnSinglePage: true, + showQuickJumper: true + }} + expandable={{ expandedRowRender }} + search={false} + dateFormatter='string' + options={false} + /> + + handleUdfRefresh()} + targetKeys={targetKeys} + targetKeyChange={setTargetKeys} + treeData={resourceInfoRequest.data ?? 
[]} + /> + + ); +}; -export default UDFRegister +export default UDFRegister; diff --git a/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/service.tsx b/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/service.tsx index 70554658ff..3cd1b01ab0 100644 --- a/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/service.tsx +++ b/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/service.tsx @@ -17,13 +17,13 @@ * */ -import {postAll} from "@/services/api"; -import {API_CONSTANTS} from "@/services/endpoints"; -import {Key} from "react"; +import { postAll } from '@/services/api'; +import { API_CONSTANTS } from '@/services/endpoints'; +import { Key } from 'react'; -export const add = (ids:Key[]) => { - return postAll(API_CONSTANTS.UDF_ADD,{data:ids}); -} -export const update = (id:Key,name:string) => { - return postAll(API_CONSTANTS.UDF_UPDATE,{id,name}); -} \ No newline at end of file +export const add = (ids: Key[]) => { + return postAll(API_CONSTANTS.UDF_ADD, { data: ids }); +}; +export const update = (id: Key, name: string) => { + return postAll(API_CONSTANTS.UDF_UPDATE, { id, name }); +}; diff --git a/dinky-web/src/pages/RegCenter/UDF/components/UDFTemplate/TemplateTable/index.tsx b/dinky-web/src/pages/RegCenter/UDF/components/UDFTemplate/TemplateTable/index.tsx index 4c76689939..be24370afb 100644 --- a/dinky-web/src/pages/RegCenter/UDF/components/UDFTemplate/TemplateTable/index.tsx +++ b/dinky-web/src/pages/RegCenter/UDF/components/UDFTemplate/TemplateTable/index.tsx @@ -218,7 +218,7 @@ const TemplateTable: React.FC = () => { {...PROTABLE_OPTIONS_PUBLIC} loading={templateState.loading} actionRef={actionRef} - headerTitle={l('rc.udf.management')} + headerTitle={l('rc.udf.template.management')} toolBarRender={() => [ void; diff --git a/dinky-web/src/pages/RegCenter/UDF/index.tsx b/dinky-web/src/pages/RegCenter/UDF/index.tsx index 5cc3a6ee93..ad3e55fced 100644 --- a/dinky-web/src/pages/RegCenter/UDF/index.tsx +++ b/dinky-web/src/pages/RegCenter/UDF/index.tsx @@ -18,12 +18,23 @@ */ import SlowlyAppear from '@/components/Animation/SlowlyAppear'; +import { CreateBtn } from '@/components/CallBackButton/CreateBtn'; +import UDFRegister from '@/pages/RegCenter/UDF/components/UDFRegister'; import TemplateTable from '@/pages/RegCenter/UDF/components/UDFTemplate/TemplateTable'; -import {PageContainer} from '@ant-design/pro-components'; -import UDFRegister from "@/pages/RegCenter/UDF/components/UDFRegister"; -import * as React from "react"; +import { l } from '@/utils/intl'; +import { PageContainer } from '@ant-design/pro-components'; +import * as React from 'react'; export default () => { + const [showEdit, setShowEdit] = React.useState(false); + const [activeKey, setActiveKey] = React.useState('udf-register'); + + const renderExtra = () => { + return activeKey === 'udf-register' ? 
( + setShowEdit(true)} /> + ) : null; + }; + return ( { type: 'card', size: 'small', animated: true, - tabBarGutter: 10, - centered: true, + tabBarGutter: 5, + centered: true }} + onTabChange={setActiveKey} + tabActiveKey={activeKey} + tabBarExtraContent={renderExtra()} tabList={[ - { - tab: 'UDF 注册管理', - key: 'udf-register', - children: , - }, { - tab: 'UDF 模版', - key: 'udf-template', - children: , + tab: l('rc.udf.register.management'), + key: 'udf-register', + children: }, + { + tab: l('rc.udf.template.management'), + key: 'udf-template', + children: + } ]} title={false} /> diff --git a/dinky-web/src/services/constants.tsx b/dinky-web/src/services/constants.tsx index fd7d50b3a9..b7a5341710 100644 --- a/dinky-web/src/services/constants.tsx +++ b/dinky-web/src/services/constants.tsx @@ -1,18 +1,20 @@ /* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. */ import { l } from '@/utils/intl'; @@ -199,6 +201,7 @@ export const DIALECT = { PYTHON_LONG: 'python', YML: 'yml', YAML: 'yaml', + CONF: 'conf', SH: 'sh', BASH: 'bash', CMD: 'cmd', @@ -207,6 +210,10 @@ export const DIALECT = { SQL: 'sql', JAVASCRIPT: 'javascript', FLINKJAR: 'flinkjar', + JAR: 'jar', + ZIP: 'zip', + TAR: 'tar', + TAR_GZ: 'gz', FLINKSQLENV: 'flinksqlenv', MYSQL: 'mysql', ORACLE: 'oracle', diff --git a/dinky-web/src/types/RegCenter/data.d.ts b/dinky-web/src/types/RegCenter/data.d.ts index 1c0ad9f83b..138687fa92 100644 --- a/dinky-web/src/types/RegCenter/data.d.ts +++ b/dinky-web/src/types/RegCenter/data.d.ts @@ -223,7 +223,6 @@ export type UDFTemplate = BaseBeanColumns & { templateCode: string; }; - export interface UDFRegisterInfo { id: number; resourcesId: number; diff --git a/dinky-web/src/utils/function.tsx b/dinky-web/src/utils/function.tsx index 8afd30db73..6401a13829 100644 --- a/dinky-web/src/utils/function.tsx +++ b/dinky-web/src/utils/function.tsx @@ -1,24 +1,28 @@ /* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. */ import { + ConfigSvg, FileIcon, FlinkSQLSvg, FolderSvgExpand, + JarSvg, JavaSvg, LogSvg, MarkDownSvg, @@ -26,7 +30,8 @@ import { ScalaSvg, ShellSvg, XMLSvg, - YAMLSvg + YAMLSvg, + ZipSvg } from '@/components/Icons/CodeLanguageIcon'; import { DATETIME_FORMAT, @@ -279,12 +284,20 @@ export const getIcon = (type: string) => { case DIALECT.YAML: case DIALECT.YML: return ; + case DIALECT.JAR: + return ; case DIALECT.SH: case DIALECT.BASH: case DIALECT.CMD: return ; + case DIALECT.CONF: + return ; case DIALECT.LOG: return ; + case DIALECT.ZIP: + case DIALECT.TAR: + case DIALECT.TAR_GZ: + return ; case DIALECT.FLINK_SQL: return ; default: From 450201107f2dd52189095f152ad11364347174e6 Mon Sep 17 00:00:00 2001 From: gaoyan Date: Sat, 4 Nov 2023 17:21:19 +0800 Subject: [PATCH 08/21] Add job done hook, Solve the problem of getting "UNKNOWN" after job completion in application mode (#2494) * Optimize the process * add job done hook * optimizeTheCode * optimizeTheCode --- .../controller/JobInstanceController.java | 18 ++- .../main/java/org/dinky/job/FlinkJobTask.java | 5 + .../dinky/job/handler/JobRefreshHandler.java | 18 ++- .../org/dinky/service/JobInstanceService.java | 11 +- .../service/impl/JobInstanceServiceImpl.java | 43 ++++++- .../src/main/java/org/dinky/app/MainApp.java | 4 + .../java/org/dinky/app/util/FlinkAppUtil.java | 117 ++++++++++++++++++ .../org/dinky/daemon/entity/TaskQueue.java | 22 +++- .../dinky/daemon/pool/DefaultThreadPool.java | 6 + .../org/dinky/daemon/pool/ThreadPool.java | 3 + .../org/dinky/daemon/task/DaemonTask.java | 2 + .../dinky/daemon/task/DaemonTaskConfig.java | 15 +++ .../org/dinky/constant/FlinkConstant.java | 2 + 13 files changed, 252 insertions(+), 14 deletions(-) create mode 100644 dinky-app/dinky-app-base/src/main/java/org/dinky/app/util/FlinkAppUtil.java diff --git a/dinky-admin/src/main/java/org/dinky/controller/JobInstanceController.java b/dinky-admin/src/main/java/org/dinky/controller/JobInstanceController.java index b22a35556a..72201b7720 100644 --- 
a/dinky-admin/src/main/java/org/dinky/controller/JobInstanceController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/JobInstanceController.java @@ -46,6 +46,7 @@ import com.fasterxml.jackson.databind.JsonNode; +import cn.dev33.satoken.annotation.SaIgnore; import cn.hutool.core.lang.Dict; import io.swagger.annotations.Api; import io.swagger.annotations.ApiImplicitParam; @@ -133,8 +134,9 @@ public Result getOneById(@RequestBody ID id) { dataType = "Integer", paramType = "query", required = true) - public Result refreshJobInfoDetail(@RequestParam Integer id) { - return Result.succeed(jobInstanceService.refreshJobInfoDetail(id)); + public Result refreshJobInfoDetail( + @RequestParam Integer id, @RequestParam(defaultValue = "false") boolean isForce) { + return Result.succeed(jobInstanceService.refreshJobInfoDetail(id, isForce)); } /** @@ -301,4 +303,16 @@ public Result getJobMetricsItems( @RequestParam String metrics) { return Result.succeed(FlinkAPI.build(address).getJobMetricsData(jobId, verticeId, metrics)); } + + @GetMapping("/hookJobDone") + @ApiOperation("hookJobDone") + @SaIgnore + public Result hookJobDone(@RequestParam String jobId, @RequestParam Integer taskId) { + boolean done = jobInstanceService.hookJobDone(jobId, taskId); + if (done) { + return Result.succeed(); + } else { + return Result.failed(); + } + } } diff --git a/dinky-admin/src/main/java/org/dinky/job/FlinkJobTask.java b/dinky-admin/src/main/java/org/dinky/job/FlinkJobTask.java index a6e8b10069..6db7014c81 100644 --- a/dinky-admin/src/main/java/org/dinky/job/FlinkJobTask.java +++ b/dinky-admin/src/main/java/org/dinky/job/FlinkJobTask.java @@ -58,6 +58,11 @@ public DaemonTask setConfig(DaemonTaskConfig config) { return this; } + @Override + public DaemonTaskConfig getConfig() { + return config; + } + /** * Processing tasks. *

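The getConfig() accessor added to FlinkJobTask above exists so the daemon queue can locate an already-queued monitoring task by its (type, id) config value rather than by object identity; the real classes involved are DaemonTask, DaemonTaskConfig and TaskQueue in dinky-daemon, which this patch changes further down. A minimal, self-contained sketch of that lookup-and-deduplicate pattern, using simplified stand-in types rather than the project classes:

    import java.util.LinkedList;
    import java.util.Objects;

    // Stand-in for DaemonTaskConfig: equality is by value, as the patch adds below.
    final class TaskKey {
        final String type;
        final Integer id;
        TaskKey(String type, Integer id) { this.type = type; this.id = id; }
        @Override public boolean equals(Object o) {
            if (this == o) return true;
            if (!(o instanceof TaskKey)) return false;
            TaskKey that = (TaskKey) o;
            return Objects.equals(type, that.type) && Objects.equals(id, that.id);
        }
        @Override public int hashCode() { return Objects.hash(type, id); }
    }

    // Stand-in for DaemonTask: only the piece this sketch needs.
    interface MonitorTask { TaskKey getConfig(); }

    // Stand-in for TaskQueue: enqueue first removes any task carrying the same config.
    class MonitorQueue {
        private final LinkedList<MonitorTask> tasks = new LinkedList<>();
        synchronized void enqueue(MonitorTask t) {
            dequeueByKey(t.getConfig()); // prevent duplicate additions, by value
            tasks.addLast(t);
        }
        synchronized MonitorTask dequeueByKey(TaskKey key) {
            MonitorTask found = null;
            for (MonitorTask t : tasks) {
                if (t.getConfig().equals(key)) found = t;
            }
            if (found != null) tasks.remove(found);
            return found;
        }
        synchronized int size() { return tasks.size(); }
    }

    public class QueueDedupSketch {
        public static void main(String[] args) {
            MonitorQueue q = new MonitorQueue();
            MonitorTask first = () -> new TaskKey("flink_job", 1);
            MonitorTask second = () -> new TaskKey("flink_job", 1); // same config, new object
            q.enqueue(first);
            q.enqueue(second); // replaces the earlier entry instead of duplicating it
            System.out.println(q.size()); // prints 1
        }
    }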
                      diff --git a/dinky-admin/src/main/java/org/dinky/job/handler/JobRefreshHandler.java b/dinky-admin/src/main/java/org/dinky/job/handler/JobRefreshHandler.java index 38051af6c5..808d1af7c2 100644 --- a/dinky-admin/src/main/java/org/dinky/job/handler/JobRefreshHandler.java +++ b/dinky-admin/src/main/java/org/dinky/job/handler/JobRefreshHandler.java @@ -55,6 +55,8 @@ import com.alibaba.fastjson2.JSON; import com.fasterxml.jackson.databind.JsonNode; +import cn.hutool.core.bean.BeanUtil; +import cn.hutool.core.bean.copier.CopyOptions; import cn.hutool.json.JSONUtil; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -90,12 +92,18 @@ public static boolean refreshJob(JobInfoDetail jobInfoDetail, boolean needSave) jobInfoDetail.getInstance().getName()); JobInstance jobInstance = jobInfoDetail.getInstance(); + JobDataDto jobDataDto = jobInfoDetail.getJobDataDto(); String oldStatus = jobInstance.getStatus(); - JobDataDto jobDataDto = getJobHistory( - jobInstance.getId(), - jobInfoDetail.getClusterInstance().getJobManagerHost(), - jobInfoDetail.getInstance().getJid()); + // Update the value of JobData from the flink api while ignoring the null value to prevent + // some other configuration from being overwritten + BeanUtil.copyProperties( + getJobData( + jobInstance.getId(), + jobInfoDetail.getClusterInstance().getJobManagerHost(), + jobInfoDetail.getInstance().getJid()), + jobDataDto, + CopyOptions.create().ignoreNullValue()); if (Asserts.isNull(jobDataDto.getJob()) || jobDataDto.isError()) { // If the job fails to get it, the default Finish Time is the current time @@ -164,7 +172,7 @@ public static boolean refreshJob(JobInfoDetail jobInfoDetail, boolean needSave) * @param jobId The job ID. * @return {@link org.dinky.data.dto.JobDataDto}. */ - public static JobDataDto getJobHistory(Integer id, String jobManagerHost, String jobId) { + public static JobDataDto getJobData(Integer id, String jobManagerHost, String jobId) { JobDataDto.JobDataDtoBuilder builder = JobDataDto.builder(); FlinkAPI api = FlinkAPI.build(jobManagerHost); try { diff --git a/dinky-admin/src/main/java/org/dinky/service/JobInstanceService.java b/dinky-admin/src/main/java/org/dinky/service/JobInstanceService.java index 8211fcd14f..a69d3556cf 100644 --- a/dinky-admin/src/main/java/org/dinky/service/JobInstanceService.java +++ b/dinky-admin/src/main/java/org/dinky/service/JobInstanceService.java @@ -83,7 +83,16 @@ public interface JobInstanceService extends ISuperService { * @param jobInstanceId The ID of the job instance to refresh the job information detail for. * @return A {@link JobInfoDetail} object representing the refreshed job information detail. */ - JobInfoDetail refreshJobInfoDetail(Integer jobInstanceId); + JobInfoDetail refreshJobInfoDetail(Integer jobInstanceId, boolean isForce); + + /** + * Hook the job done for the given job ID and task ID. + * + * @param jobId The ID of the job to hook the job done for. + * @param taskId The ID of the task to hook the job done for. + * @return A boolean indicating whether the hooking was successful or not. + */ + boolean hookJobDone(String jobId, Integer taskId); /** * Refresh the job instances for the given task IDs. 
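The JobRefreshHandler hunk above swaps a plain reassignment for a null-ignoring bean copy, so fields the Flink REST API did not return cannot wipe out values already stored on the JobDataDto. A small sketch of that hutool copy pattern, using a throwaway bean instead of the real DTO:

    import cn.hutool.core.bean.BeanUtil;
    import cn.hutool.core.bean.copier.CopyOptions;

    public class NullSafeCopySketch {
        public static class JobData {
            private String status;
            private String config;
            public String getStatus() { return status; }
            public void setStatus(String status) { this.status = status; }
            public String getConfig() { return config; }
            public void setConfig(String config) { this.config = config; }
        }

        public static void main(String[] args) {
            JobData existing = new JobData();
            existing.setStatus("RUNNING");
            existing.setConfig("kept-config");

            JobData fetched = new JobData();
            fetched.setStatus("FINISHED"); // config is left null on the fetched side

            // Null fields on the source are skipped, so existing config survives the refresh.
            BeanUtil.copyProperties(fetched, existing, CopyOptions.create().ignoreNullValue());
            System.out.println(existing.getStatus() + " / " + existing.getConfig()); // FINISHED / kept-config
        }
    }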
diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/JobInstanceServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/JobInstanceServiceImpl.java index 42367bb1e9..0784a931b4 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/JobInstanceServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/JobInstanceServiceImpl.java @@ -21,7 +21,9 @@ import org.dinky.assertion.Asserts; import org.dinky.context.TenantContextHolder; +import org.dinky.daemon.pool.DefaultThreadPool; import org.dinky.daemon.task.DaemonFactory; +import org.dinky.daemon.task.DaemonTask; import org.dinky.daemon.task.DaemonTaskConfig; import org.dinky.data.dto.ClusterConfigurationDTO; import org.dinky.data.dto.JobDataDto; @@ -56,12 +58,14 @@ import org.springframework.stereotype.Service; +import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; /** * JobInstanceServiceImpl @@ -70,6 +74,7 @@ */ @Service @RequiredArgsConstructor +@Slf4j public class JobInstanceServiceImpl extends SuperServiceImpl implements JobInstanceService { @@ -182,18 +187,48 @@ public JobInfoDetail getJobInfoDetailInfo(JobInstance jobInstance) { } @Override - public JobInfoDetail refreshJobInfoDetail(Integer jobInstanceId) { + public JobInfoDetail refreshJobInfoDetail(Integer jobInstanceId, boolean isForce) { JobInfoDetail jobInfoDetail = getJobInfoDetail(jobInstanceId); - JobRefreshHandler.refreshJob(jobInfoDetail, true); - DaemonFactory.refeshOraddTask(DaemonTaskConfig.build(FlinkJobTask.TYPE, jobInstanceId)); + // Directly returns database data if the task has completed and is not a forced refresh + if (JobStatus.isDone(jobInfoDetail.getInstance().getStatus()) && !isForce) { + return jobInfoDetail; + } + boolean isDone = JobRefreshHandler.refreshJob(jobInfoDetail, true); + // If the task becomes incomplete after a forced refresh, it is re-queued to the task + if (!isDone) { + DaemonFactory.refeshOraddTask(DaemonTaskConfig.build(FlinkJobTask.TYPE, jobInstanceId)); + } return jobInfoDetail; } + @Override + public boolean hookJobDone(String jobId, Integer taskId) { + LambdaQueryWrapper queryWrapper = new LambdaQueryWrapper<>(); + queryWrapper.eq(JobInstance::getJid, jobId).eq(JobInstance::getTaskId, taskId); + JobInstance instance = baseMapper.selectOne(queryWrapper); + if (instance == null) { + // Not having a corresponding jobinstance means that this may not have succeeded in running, + // returning true to prevent retry. + return true; + } + DaemonTaskConfig config = DaemonTaskConfig.build(FlinkJobTask.TYPE, instance.getId()); + DaemonTask daemonTask = DefaultThreadPool.getInstance().dequeueByTask(config); + if (daemonTask == null) { + daemonTask = DaemonTask.build(config); + } + boolean isDone = daemonTask.dealTask(); + // If the task is not completed, it is re-queued + if (!isDone) { + DefaultThreadPool.getInstance().execute(daemonTask); + } + return isDone; + } + @Override public void refreshJobByTaskIds(Integer... 
taskIds) { for (Integer taskId : taskIds) { JobInstance instance = getJobInstanceByTaskId(taskId); - refreshJobInfoDetail(instance.getId()); + refreshJobInfoDetail(instance.getId(), false); } } diff --git a/dinky-app/dinky-app-1.16/src/main/java/org/dinky/app/MainApp.java b/dinky-app/dinky-app-1.16/src/main/java/org/dinky/app/MainApp.java index fbcc98e3e5..5f8e2bef14 100644 --- a/dinky-app/dinky-app-1.16/src/main/java/org/dinky/app/MainApp.java +++ b/dinky-app/dinky-app-1.16/src/main/java/org/dinky/app/MainApp.java @@ -22,6 +22,7 @@ import org.dinky.app.constant.AppParamConstant; import org.dinky.app.db.DBUtil; import org.dinky.app.flinksql.Submitter; +import org.dinky.app.util.FlinkAppUtil; import org.dinky.data.app.AppParamConfig; import org.dinky.utils.JsonUtils; @@ -55,6 +56,9 @@ public static void main(String[] args) throws Exception { } catch (Exception e) { log.error("exectue app failed with config: {}", appConfig); throw e; + } finally { + log.info("Start Monitor Job"); + FlinkAppUtil.monitorFlinkTask(appConfig.getTaskId()); } } } diff --git a/dinky-app/dinky-app-base/src/main/java/org/dinky/app/util/FlinkAppUtil.java b/dinky-app/dinky-app-base/src/main/java/org/dinky/app/util/FlinkAppUtil.java new file mode 100644 index 0000000000..5137d9e6ae --- /dev/null +++ b/dinky-app/dinky-app-base/src/main/java/org/dinky/app/util/FlinkAppUtil.java @@ -0,0 +1,117 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.app.util; + +import org.dinky.app.db.DBUtil; +import org.dinky.constant.FlinkConstant; +import org.dinky.data.enums.JobStatus; +import org.dinky.data.enums.Status; +import org.dinky.utils.JsonUtils; + +import org.apache.flink.client.deployment.StandaloneClusterId; +import org.apache.flink.client.program.rest.RestClusterClient; +import org.apache.flink.configuration.Configuration; +import org.apache.flink.configuration.GlobalConfiguration; +import org.apache.flink.runtime.client.JobStatusMessage; + +import java.util.Collection; + +import cn.hutool.core.text.StrFormatter; +import cn.hutool.http.HttpUtil; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class FlinkAppUtil { + + /** + * Utility class for monitoring Flink tasks. + * This method creates a Flink REST client and continuously checks the status of the task until it is completed. + * If the task is completed, it sends a hook notification and stops monitoring. 
+ */ + public static void monitorFlinkTask(int taskId) { + boolean isRun = true; + try (RestClusterClient client = createClient()) { + while (isRun) { + Collection jobs = client.listJobs().get(); + if (jobs.isEmpty()) { + log.error("No Flink task found, try again in 2 seconds....."); + } + for (JobStatusMessage job : jobs) { + if (JobStatus.isDone(job.getJobState().toString())) { + sendHook(taskId, job.getJobId().toHexString(), 0); + log.info("hook {} finished.", job.getJobName()); + // There should be only one in application mode, so stop monitoring here + isRun = false; + } + } + Thread.sleep(5000); + } + } catch (Exception e) { + // If an exception is thrown, it will cause the k8s pod to trigger a restart, + // resulting in an inability to exit normally + log.error("hook failed:", e); + } + } + + /** + * The sendHook method is used to send a Hook request. + * This method sends an HTTP request to notify a specific address about the completion status of a task. + * If the request is successful, the returned code in the result is 0; otherwise, an exception is thrown. + * If sending the request fails, it will be retried up to 30 times with a 1-second interval between each retry. + * If the retry limit is exceeded, an exception is thrown. + */ + private static void sendHook(int taskId, String jobId, int reTryCount) throws InterruptedException { + try { + String dinkyAddr = DBUtil.getSysConfig(Status.SYS_ENV_SETTINGS_DINKYADDR.getKey()); + String url = StrFormatter.format( + "http://{}/api/jobInstance/hookJobDone?taskId={}&jobId={}", dinkyAddr, taskId, jobId); + String resultStr = HttpUtil.get(url); + // TODO The Result entity class should be used here, but Result.class is not in the comm module; migrating it would be too large a change, so it is skipped for now + String code = JsonUtils.parseObject(resultStr).get("code").toString(); + if (!"0".equals(code)) { + throw new RuntimeException("Hook Job Done result failed: " + resultStr); + } + } catch (Exception e) { + if (reTryCount < 30) { + log.error("send hook failed,retry later taskId:{},jobId:{},{}", taskId, jobId, e.getMessage()); + Thread.sleep(1000); + sendHook(taskId, jobId, reTryCount + 1); + } else { + throw new RuntimeException("Hook Job Done failed, The retry limit is exceeded: " + e.getMessage()); + } + } + } + + /** + * Create a REST cluster client for Flink.
+ * @return + * @throws Exception + */ + private static RestClusterClient createClient() throws Exception { + Configuration config; + Configuration fromEnvConfig = GlobalConfiguration.loadConfiguration(); + if (!fromEnvConfig.keySet().isEmpty()) { + config = fromEnvConfig; + } else { + config = GlobalConfiguration.loadConfiguration(FlinkConstant.DEFAULT_FLINK_HOME); + } + return new RestClusterClient<>(config, StandaloneClusterId.getInstance()); + } +} diff --git a/dinky-daemon/src/main/java/org/dinky/daemon/entity/TaskQueue.java b/dinky-daemon/src/main/java/org/dinky/daemon/entity/TaskQueue.java index 7391b97985..c51eaddffd 100644 --- a/dinky-daemon/src/main/java/org/dinky/daemon/entity/TaskQueue.java +++ b/dinky-daemon/src/main/java/org/dinky/daemon/entity/TaskQueue.java @@ -19,6 +19,9 @@ package org.dinky.daemon.entity; +import org.dinky.daemon.task.DaemonTask; +import org.dinky.daemon.task.DaemonTaskConfig; + import java.util.LinkedList; import lombok.Getter; @@ -26,7 +29,7 @@ @Getter @Slf4j -public class TaskQueue { +public class TaskQueue { private final LinkedList tasks = new LinkedList<>(); @@ -36,7 +39,7 @@ public void enqueue(T task) { synchronized (lock) { lock.notifyAll(); // prevent duplicate additions - tasks.remove(task); + dequeueByTask(task.getConfig()); tasks.addLast(task); } } @@ -54,6 +57,21 @@ public T dequeue() { } } + public T dequeueByTask(DaemonTaskConfig task) { + synchronized (lock) { + T find = null; + for (T t : tasks) { + if (t.getConfig().equals(task)) { + find = t; + } + } + if (find != null) { + tasks.remove(find); + } + return find; + } + } + public int getTaskSize() { synchronized (lock) { return tasks.size(); diff --git a/dinky-daemon/src/main/java/org/dinky/daemon/pool/DefaultThreadPool.java b/dinky-daemon/src/main/java/org/dinky/daemon/pool/DefaultThreadPool.java index dcc9467884..9d0c91244c 100644 --- a/dinky-daemon/src/main/java/org/dinky/daemon/pool/DefaultThreadPool.java +++ b/dinky-daemon/src/main/java/org/dinky/daemon/pool/DefaultThreadPool.java @@ -22,6 +22,7 @@ import org.dinky.daemon.entity.TaskQueue; import org.dinky.daemon.entity.TaskWorker; import org.dinky.daemon.task.DaemonTask; +import org.dinky.daemon.task.DaemonTaskConfig; import java.util.ArrayList; import java.util.Collections; @@ -70,6 +71,11 @@ public void execute(DaemonTask daemonTask) { } } + @Override + public DaemonTask dequeueByTask(DaemonTaskConfig daemonTask) { + return queue.dequeueByTask(daemonTask); + } + @Override public void addWorkers(int num) { synchronized (lock) { diff --git a/dinky-daemon/src/main/java/org/dinky/daemon/pool/ThreadPool.java b/dinky-daemon/src/main/java/org/dinky/daemon/pool/ThreadPool.java index 85cbf2b37e..d4ddd8c425 100644 --- a/dinky-daemon/src/main/java/org/dinky/daemon/pool/ThreadPool.java +++ b/dinky-daemon/src/main/java/org/dinky/daemon/pool/ThreadPool.java @@ -20,6 +20,7 @@ package org.dinky.daemon.pool; import org.dinky.daemon.task.DaemonTask; +import org.dinky.daemon.task.DaemonTaskConfig; /** * @operate @@ -30,6 +31,8 @@ public interface ThreadPool { // 执行任务 void execute(DaemonTask daemonTask); + DaemonTask dequeueByTask(DaemonTaskConfig daemonTask); + // 关闭连接池 void shutdown(); diff --git a/dinky-daemon/src/main/java/org/dinky/daemon/task/DaemonTask.java b/dinky-daemon/src/main/java/org/dinky/daemon/task/DaemonTask.java index 4ea7522ea2..db56f8cc2b 100644 --- a/dinky-daemon/src/main/java/org/dinky/daemon/task/DaemonTask.java +++ b/dinky-daemon/src/main/java/org/dinky/daemon/task/DaemonTask.java @@ -49,6 +49,8 @@ static DaemonTask 
build(DaemonTaskConfig config) { DaemonTask setConfig(DaemonTaskConfig config); + DaemonTaskConfig getConfig(); + default boolean canHandle(String type) { return Asserts.isEqualsIgnoreCase(getType(), type); } diff --git a/dinky-daemon/src/main/java/org/dinky/daemon/task/DaemonTaskConfig.java b/dinky-daemon/src/main/java/org/dinky/daemon/task/DaemonTaskConfig.java index 89a62da69d..811e9a2993 100644 --- a/dinky-daemon/src/main/java/org/dinky/daemon/task/DaemonTaskConfig.java +++ b/dinky-daemon/src/main/java/org/dinky/daemon/task/DaemonTaskConfig.java @@ -19,6 +19,8 @@ package org.dinky.daemon.task; +import java.util.Objects; + import lombok.Getter; @Getter @@ -35,4 +37,17 @@ public DaemonTaskConfig(String type, Integer id) { public static DaemonTaskConfig build(String type, Integer id) { return new DaemonTaskConfig(type, id); } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DaemonTaskConfig that = (DaemonTaskConfig) o; + return Objects.equals(type, that.type) && Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(type, id); + } } diff --git a/dinky-executor/src/main/java/org/dinky/constant/FlinkConstant.java b/dinky-executor/src/main/java/org/dinky/constant/FlinkConstant.java index ef3476afb8..0c474c1032 100644 --- a/dinky-executor/src/main/java/org/dinky/constant/FlinkConstant.java +++ b/dinky-executor/src/main/java/org/dinky/constant/FlinkConstant.java @@ -36,4 +36,6 @@ public interface FlinkConstant { String LOCAL_HOST = "localhost:8081"; /** changlog op */ String OP = "op"; + + String DEFAULT_FLINK_HOME = "/opt/flink"; } From 9edfac5e5bb7852938a1aed91475461b26734fd1 Mon Sep 17 00:00:00 2001 From: gaoyan Date: Sat, 4 Nov 2023 21:15:03 +0800 Subject: [PATCH 09/21] Fill lost other flink version (#2495) * Optimize the process * fill lost flik other version --- .../dinky-app-1.14/src/main/java/org/dinky/app/MainApp.java | 4 ++++ .../dinky-app-1.15/src/main/java/org/dinky/app/MainApp.java | 4 ++++ .../dinky-app-1.17/src/main/java/org/dinky/app/MainApp.java | 4 ++++ .../dinky-app-1.18/src/main/java/org/dinky/app/MainApp.java | 4 ++++ 4 files changed, 16 insertions(+) diff --git a/dinky-app/dinky-app-1.14/src/main/java/org/dinky/app/MainApp.java b/dinky-app/dinky-app-1.14/src/main/java/org/dinky/app/MainApp.java index fbcc98e3e5..5f8e2bef14 100644 --- a/dinky-app/dinky-app-1.14/src/main/java/org/dinky/app/MainApp.java +++ b/dinky-app/dinky-app-1.14/src/main/java/org/dinky/app/MainApp.java @@ -22,6 +22,7 @@ import org.dinky.app.constant.AppParamConstant; import org.dinky.app.db.DBUtil; import org.dinky.app.flinksql.Submitter; +import org.dinky.app.util.FlinkAppUtil; import org.dinky.data.app.AppParamConfig; import org.dinky.utils.JsonUtils; @@ -55,6 +56,9 @@ public static void main(String[] args) throws Exception { } catch (Exception e) { log.error("exectue app failed with config: {}", appConfig); throw e; + } finally { + log.info("Start Monitor Job"); + FlinkAppUtil.monitorFlinkTask(appConfig.getTaskId()); } } } diff --git a/dinky-app/dinky-app-1.15/src/main/java/org/dinky/app/MainApp.java b/dinky-app/dinky-app-1.15/src/main/java/org/dinky/app/MainApp.java index fbcc98e3e5..5f8e2bef14 100644 --- a/dinky-app/dinky-app-1.15/src/main/java/org/dinky/app/MainApp.java +++ b/dinky-app/dinky-app-1.15/src/main/java/org/dinky/app/MainApp.java @@ -22,6 +22,7 @@ import org.dinky.app.constant.AppParamConstant; import org.dinky.app.db.DBUtil; import 
org.dinky.app.flinksql.Submitter; +import org.dinky.app.util.FlinkAppUtil; import org.dinky.data.app.AppParamConfig; import org.dinky.utils.JsonUtils; @@ -55,6 +56,9 @@ public static void main(String[] args) throws Exception { } catch (Exception e) { log.error("exectue app failed with config: {}", appConfig); throw e; + } finally { + log.info("Start Monitor Job"); + FlinkAppUtil.monitorFlinkTask(appConfig.getTaskId()); } } } diff --git a/dinky-app/dinky-app-1.17/src/main/java/org/dinky/app/MainApp.java b/dinky-app/dinky-app-1.17/src/main/java/org/dinky/app/MainApp.java index fbcc98e3e5..5f8e2bef14 100644 --- a/dinky-app/dinky-app-1.17/src/main/java/org/dinky/app/MainApp.java +++ b/dinky-app/dinky-app-1.17/src/main/java/org/dinky/app/MainApp.java @@ -22,6 +22,7 @@ import org.dinky.app.constant.AppParamConstant; import org.dinky.app.db.DBUtil; import org.dinky.app.flinksql.Submitter; +import org.dinky.app.util.FlinkAppUtil; import org.dinky.data.app.AppParamConfig; import org.dinky.utils.JsonUtils; @@ -55,6 +56,9 @@ public static void main(String[] args) throws Exception { } catch (Exception e) { log.error("exectue app failed with config: {}", appConfig); throw e; + } finally { + log.info("Start Monitor Job"); + FlinkAppUtil.monitorFlinkTask(appConfig.getTaskId()); } } } diff --git a/dinky-app/dinky-app-1.18/src/main/java/org/dinky/app/MainApp.java b/dinky-app/dinky-app-1.18/src/main/java/org/dinky/app/MainApp.java index fbcc98e3e5..5f8e2bef14 100644 --- a/dinky-app/dinky-app-1.18/src/main/java/org/dinky/app/MainApp.java +++ b/dinky-app/dinky-app-1.18/src/main/java/org/dinky/app/MainApp.java @@ -22,6 +22,7 @@ import org.dinky.app.constant.AppParamConstant; import org.dinky.app.db.DBUtil; import org.dinky.app.flinksql.Submitter; +import org.dinky.app.util.FlinkAppUtil; import org.dinky.data.app.AppParamConfig; import org.dinky.utils.JsonUtils; @@ -55,6 +56,9 @@ public static void main(String[] args) throws Exception { } catch (Exception e) { log.error("exectue app failed with config: {}", appConfig); throw e; + } finally { + log.info("Start Monitor Job"); + FlinkAppUtil.monitorFlinkTask(appConfig.getTaskId()); } } } From 9d39c02c78062f09277b7e29b21ed099ad0ce69d Mon Sep 17 00:00:00 2001 From: gaoyan Date: Sun, 5 Nov 2023 18:21:11 +0800 Subject: [PATCH 10/21] Fix task version bug (#2496) * Optimize the process * fix task version bug * fix task version bug * fix task version bug --- .../data/dto/TaskRollbackVersionDTO.java | 2 +- .../org/dinky/data/model/TaskVersion.java | 19 +++++++++++++ .../org/dinky/service/TaskVersionService.java | 3 ++- .../dinky/service/impl/TaskServiceImpl.java | 7 ++--- .../service/impl/TaskVersionServiceImpl.java | 16 +++++------ .../Console/ConsoleContent.tsx | 25 ++++++++--------- .../RightContainer/HistoryVersion/index.tsx | 27 ++++++++++--------- .../JobDetail/JobVersion/JobVersionTab.tsx | 2 +- 8 files changed, 62 insertions(+), 39 deletions(-) diff --git a/dinky-admin/src/main/java/org/dinky/data/dto/TaskRollbackVersionDTO.java b/dinky-admin/src/main/java/org/dinky/data/dto/TaskRollbackVersionDTO.java index 0888ab698c..3b3cf0e94c 100644 --- a/dinky-admin/src/main/java/org/dinky/data/dto/TaskRollbackVersionDTO.java +++ b/dinky-admin/src/main/java/org/dinky/data/dto/TaskRollbackVersionDTO.java @@ -31,7 +31,7 @@ public class TaskRollbackVersionDTO implements Serializable { @ApiModelProperty(value = "ID", dataType = "Integer", example = "1", notes = "The identifier of the task") - private Integer id; + private Integer taskId; @ApiModelProperty( value = "Version ID", 
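With the field on TaskRollbackVersionDTO renamed from id to taskId, a rollback request now names the task explicitly alongside the version to restore; the matching frontend change is in HistoryVersion/index.tsx later in this patch. A sketch of the resulting request body (values and the surrounding class are placeholders, not project code):

    import com.fasterxml.jackson.databind.ObjectMapper;

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class RollbackPayloadSketch {
        public static void main(String[] args) throws Exception {
            Map<String, Object> payload = new LinkedHashMap<>();
            payload.put("taskId", 42);   // formerly sent as "id"
            payload.put("versionId", 3); // the snapshot version to roll back to
            System.out.println(new ObjectMapper().writeValueAsString(payload));
            // {"taskId":42,"versionId":3}
        }
    }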
diff --git a/dinky-admin/src/main/java/org/dinky/data/model/TaskVersion.java b/dinky-admin/src/main/java/org/dinky/data/model/TaskVersion.java index 34efe5ac5d..83bda4d0e3 100644 --- a/dinky-admin/src/main/java/org/dinky/data/model/TaskVersion.java +++ b/dinky-admin/src/main/java/org/dinky/data/model/TaskVersion.java @@ -23,6 +23,7 @@ import java.io.Serializable; import java.time.LocalDateTime; +import java.util.Objects; import com.baomidou.mybatisplus.annotation.TableField; import com.baomidou.mybatisplus.annotation.TableName; @@ -84,4 +85,22 @@ public class TaskVersion implements Serializable { @ApiModelProperty(value = "Create Time", dataType = "Date", notes = "Timestamp when the version was created") @TableField(value = "create_time") private LocalDateTime createTime; + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TaskVersion that = (TaskVersion) o; + return Objects.equals(taskId, that.taskId) + && Objects.equals(versionId, that.versionId) + && Objects.equals(statement, that.statement) + && Objects.equals(dialect, that.dialect) + && Objects.equals(type, that.type) + && Objects.equals(taskConfigure, that.taskConfigure); + } + + @Override + public int hashCode() { + return Objects.hash(taskId, versionId, statement, dialect, type, taskConfigure); + } } diff --git a/dinky-admin/src/main/java/org/dinky/service/TaskVersionService.java b/dinky-admin/src/main/java/org/dinky/service/TaskVersionService.java index 0380cdd4a4..1524c7d2a3 100644 --- a/dinky-admin/src/main/java/org/dinky/service/TaskVersionService.java +++ b/dinky-admin/src/main/java/org/dinky/service/TaskVersionService.java @@ -38,6 +38,7 @@ public interface TaskVersionService extends ISuperService { * Create a snapshot of a task version. * * @param task A {@link TaskDTO} object representing the task to create a snapshot for. 
+ * @return */ - void createTaskVersionSnapshot(TaskDTO task); + Integer createTaskVersionSnapshot(TaskDTO task); } diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java index 26b93a2537..992a0445e8 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java @@ -436,7 +436,8 @@ public boolean changeTaskLifeRecyle(Integer taskId, JobLifeCycle lifeCycle) thro // sqlExplainResult.getError())); // } // } - taskVersionService.createTaskVersionSnapshot(task); + Integer taskVersionId = taskVersionService.createTaskVersionSnapshot(task); + task.setVersionId(taskVersionId); } return saveOrUpdate(task.buildTask()); } @@ -558,12 +559,12 @@ public List getAllUDF() { @Override public boolean rollbackTask(TaskRollbackVersionDTO dto) { - if (Asserts.isNull(dto.getVersionId()) || Asserts.isNull(dto.getId())) { + if (Asserts.isNull(dto.getVersionId()) || Asserts.isNull(dto.getTaskId())) { throw new BusException("the version is error"); } LambdaQueryWrapper queryWrapper = new LambdaQueryWrapper() - .eq(TaskVersion::getTaskId, dto.getId()) + .eq(TaskVersion::getTaskId, dto.getTaskId()) .eq(TaskVersion::getVersionId, dto.getVersionId()); TaskVersion taskVersion = taskVersionService.getOne(queryWrapper); diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/TaskVersionServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/TaskVersionServiceImpl.java index f7bce348ae..081e600cb2 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/TaskVersionServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/TaskVersionServiceImpl.java @@ -52,7 +52,7 @@ public List getTaskVersionByTaskId(Integer taskId) { } @Override - public void createTaskVersionSnapshot(TaskDTO task) { + public Integer createTaskVersionSnapshot(TaskDTO task) { List taskVersions = getTaskVersionByTaskId(task.getId()); List versionIds = taskVersions.stream().map(TaskVersion::getVersionId).collect(Collectors.toList()); @@ -69,22 +69,22 @@ public void createTaskVersionSnapshot(TaskDTO task) { taskVersion.setTaskId(taskVersion.getId()); taskVersion.setId(null); - if (Asserts.isNull(task.getVersionId())) { + if (Asserts.isNull(task.getVersionId()) || !versionIds.contains(task.getVersionId())) { // FIRST RELEASE, ADD NEW VERSION taskVersion.setVersionId(1); - task.setVersionId(1); taskVersion.setCreateTime(LocalDateTime.now()); save(taskVersion); } else { // Explain that there is a version, you need to determine whether it is an old version after fallback - TaskVersion version = versionMap.get(task.getVersionId()); - version.setId(null); - if (versionIds.contains(task.getVersionId()) && !taskVersion.equals(version)) { - taskVersion.setVersionId(Collections.max(versionIds) + 1); - task.setVersionId(Collections.max(versionIds) + 1); + TaskVersion version = versionMap.getOrDefault(task.getVersionId(), new TaskVersion()); + // IDs are not involved in the comparison + if (!taskVersion.equals(version)) { + int newVersionId = versionIds.isEmpty() ? 
1 : Collections.max(versionIds) + 1; + taskVersion.setVersionId(newVersionId); taskVersion.setCreateTime(LocalDateTime.now()); save(taskVersion); } } + return taskVersion.getVersionId(); } } diff --git a/dinky-web/src/pages/DataStudio/BottomContainer/Console/ConsoleContent.tsx b/dinky-web/src/pages/DataStudio/BottomContainer/Console/ConsoleContent.tsx index 54f86346e1..6a2104351e 100644 --- a/dinky-web/src/pages/DataStudio/BottomContainer/Console/ConsoleContent.tsx +++ b/dinky-web/src/pages/DataStudio/BottomContainer/Console/ConsoleContent.tsx @@ -1,19 +1,19 @@ /* * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. * */ @@ -163,6 +163,7 @@ const ConsoleContent = (props: ConsoleProps) => { onSelect={onSelect} treeData={[processNode]} expandedKeys={expandedKeys} + expandAction={"doubleClick"} onExpand={handleExpand} /> ) : ( diff --git a/dinky-web/src/pages/DataStudio/RightContainer/HistoryVersion/index.tsx b/dinky-web/src/pages/DataStudio/RightContainer/HistoryVersion/index.tsx index 7bb152a5e7..ec955497aa 100644 --- a/dinky-web/src/pages/DataStudio/RightContainer/HistoryVersion/index.tsx +++ b/dinky-web/src/pages/DataStudio/RightContainer/HistoryVersion/index.tsx @@ -1,21 +1,22 @@ /* * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. * */ + import VersionList from '@/components/VersionList'; import { getCurrentData } from '@/pages/DataStudio/function'; import { StateType } from '@/pages/DataStudio/model'; @@ -111,7 +112,7 @@ const HistoryVersion = (props: any) => { cancelText: l('button.cancel'), onOk: async () => { const TaskVersionRollbackItem = { - id: current?.key, + taskId: row.taskId, versionId: row.versionId }; await handleOption( diff --git a/dinky-web/src/pages/DevOps/JobDetail/JobVersion/JobVersionTab.tsx b/dinky-web/src/pages/DevOps/JobDetail/JobVersion/JobVersionTab.tsx index 27042082d4..56949e5d7d 100644 --- a/dinky-web/src/pages/DevOps/JobDetail/JobVersion/JobVersionTab.tsx +++ b/dinky-web/src/pages/DevOps/JobDetail/JobVersion/JobVersionTab.tsx @@ -39,7 +39,7 @@ const JobVersionTab = (props: JobProps) => { isLatest: true }; - const [currentVersion, setCurrentVersion] = useState(); + const [currentVersion, setCurrentVersion] = useState(latestVersion); const versionList = useRequest( { From 9b76c75d33f84da928d27d7440f4f987e05bc6cf Mon Sep 17 00:00:00 2001 From: Licho Date: Mon, 6 Nov 2023 09:51:05 +0800 Subject: [PATCH 11/21] fix: h2 initialize db.sql multiple times (#2497) Signed-off-by: licho --- dinky-admin/src/main/resources/application-h2.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/dinky-admin/src/main/resources/application-h2.yml b/dinky-admin/src/main/resources/application-h2.yml index bde759bcd2..0f732c55c4 100644 --- a/dinky-admin/src/main/resources/application-h2.yml +++ b/dinky-admin/src/main/resources/application-h2.yml @@ -20,4 +20,6 @@ spring: driver-class-name: org.h2.Driver url: jdbc:h2:mem:dinky;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true;INIT=runscript from 'classpath:db/db-h2.sql' username: dinky - password: dinky \ No newline at end of file + password: dinky + druid: + initial-size: 1 \ No newline at end of file From ded5ae681f502c1317e222045479d280ef602d76 Mon Sep 17 00:00:00 2001 From: ZackYoung Date: Mon, 6 Nov 2023 11:04:24 +0800 Subject: [PATCH 12/21] [Fix][Web]Fix resources manage (#2500) * fix_resources_manage * fix_resources_manage * fix_resources_manage --- .../org/dinky/controller/UDFController.java | 2 +- .../components/ResourceOverView/index.tsx | 532 +++++++++--------- 2 files changed, 268 insertions(+), 266 deletions(-) diff --git a/dinky-admin/src/main/java/org/dinky/controller/UDFController.java b/dinky-admin/src/main/java/org/dinky/controller/UDFController.java index d3940dd765..d905e2b100 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/UDFController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/UDFController.java @@ -88,7 +88,7 @@ public Result> udfResourcesList() { * @return Result */ @PostMapping("/addOrUpdateByResourceId") - public 
Result saveOrUpdate(@RequestBody CommonDTO> dto) { + public Result addOrUpdateByResourceId(@RequestBody CommonDTO> dto) { udfService.addOrUpdateByResourceId(dto.getData()); return Result.succeed(); } diff --git a/dinky-web/src/pages/RegCenter/Resource/components/ResourceOverView/index.tsx b/dinky-web/src/pages/RegCenter/Resource/components/ResourceOverView/index.tsx index bfdc69f0be..e788ec9fc0 100644 --- a/dinky-web/src/pages/RegCenter/Resource/components/ResourceOverView/index.tsx +++ b/dinky-web/src/pages/RegCenter/Resource/components/ResourceOverView/index.tsx @@ -1,301 +1,303 @@ /* * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
* */ import RightContextMenu from '@/components/RightContextMenu'; -import { AuthorizedObject, useAccess } from '@/hooks/useAccess'; +import {AuthorizedObject, useAccess} from '@/hooks/useAccess'; import { - RIGHT_CONTEXT_FILE_MENU, - RIGHT_CONTEXT_FOLDER_MENU + RIGHT_CONTEXT_FILE_MENU, + RIGHT_CONTEXT_FOLDER_MENU } from '@/pages/RegCenter/Resource/components/constants'; import FileShow from '@/pages/RegCenter/Resource/components/FileShow'; import FileTree from '@/pages/RegCenter/Resource/components/FileTree'; import ResourceModal from '@/pages/RegCenter/Resource/components/ResourceModal'; import ResourcesUploadModal from '@/pages/RegCenter/Resource/components/ResourcesUploadModal'; -import { handleOption, handleRemoveById, queryDataByParams } from '@/services/BusinessCrud'; -import { API_CONSTANTS } from '@/services/endpoints'; -import { ResourceInfo } from '@/types/RegCenter/data'; -import { InitResourceState } from '@/types/RegCenter/init.d'; -import { ResourceState } from '@/types/RegCenter/state.d'; -import { l } from '@/utils/intl'; -import { ProCard } from '@ant-design/pro-components'; -import { MenuInfo } from 'rc-menu/es/interface'; -import { Resizable } from 're-resizable'; -import React, { useCallback, useEffect, useState } from 'react'; +import {handleOption, handleRemoveById, queryDataByParams} from '@/services/BusinessCrud'; +import {API_CONSTANTS} from '@/services/endpoints'; +import {ResourceInfo} from '@/types/RegCenter/data'; +import {InitResourceState} from '@/types/RegCenter/init.d'; +import {ResourceState} from '@/types/RegCenter/state.d'; +import {l} from '@/utils/intl'; +import {ProCard} from '@ant-design/pro-components'; +import {MenuInfo} from 'rc-menu/es/interface'; +import {Resizable} from 're-resizable'; +import React, {useCallback, useState} from 'react'; +import {useAsyncEffect} from "ahooks"; +import {unSupportView} from "@/utils/function"; const ResourceOverView: React.FC = () => { - const [resourceState, setResourceState] = useState(InitResourceState); + const [resourceState, setResourceState] = useState(InitResourceState); - const [editModal, setEditModal] = useState(''); + const [editModal, setEditModal] = useState(''); - const [uploadValue] = useState({ - url: API_CONSTANTS.RESOURCE_UPLOAD, - pid: '', - description: '' - }); + const [uploadValue] = useState({ + url: API_CONSTANTS.RESOURCE_UPLOAD, + pid: '', + description: '' + }); - const refreshTree = async () => { - await queryDataByParams(API_CONSTANTS.RESOURCE_SHOW_TREE).then((res) => - setResourceState((prevState) => ({ ...prevState, treeData: res ?? [] })) - ); - }; + const refreshTree = async () => { + await queryDataByParams(API_CONSTANTS.RESOURCE_SHOW_TREE).then((res) => + setResourceState((prevState) => ({...prevState, treeData: res ?? []})) + ); + }; + + useAsyncEffect(async () => { + await refreshTree(); + }, []) - useEffect(() => { - refreshTree(); - }, [resourceState]); + /** + * query content by id + * @type {(id: number) => Promise} + */ + const queryContent: (id: number) => Promise = useCallback( + async (id: number) => { + await queryDataByParams(API_CONSTANTS.RESOURCE_GET_CONTENT_BY_ID, { + id + }).then((res) => setResourceState((prevState) => ({...prevState, content: res ?? ''}))); + }, + [] + ); - /** - * query content by id - * @type {(id: number) => Promise} - */ - const queryContent = useCallback( - async (id: number) => { - await queryDataByParams(API_CONSTANTS.RESOURCE_GET_CONTENT_BY_ID, { - id - }).then((res) => setResourceState((prevState) => ({ ...prevState, content: res ?? 
'' }))); - }, - [resourceState.clickedNode] - ); + /** + * the node click event + * @param info + * @returns {Promise} + */ + const handleNodeClick = async (info: any): Promise => { + const { + node: {id, isLeaf, key, name}, + node + } = info; + setResourceState((prevState) => ({...prevState, selectedKeys: [key], clickedNode: node})); + if (isLeaf && !unSupportView(name)) { + await queryContent(id); + } else { + setResourceState((prevState) => ({...prevState, content: ''})); + } + }; - /** - * the node click event - * @param info - * @returns {Promise} - */ - const handleNodeClick = async (info: any): Promise => { - const { - node: { id, isLeaf, key }, - node - } = info; - setResourceState((prevState) => ({ ...prevState, selectedKeys: [key], clickedNode: node })); - if (isLeaf) { - await queryContent(id); - } else { - setResourceState((prevState) => ({ ...prevState, content: '' })); - } - }; + /** + * the node right click event OF upload, + */ + const handleCreateFolder = () => { + if (resourceState.rightClickedNode) { + setEditModal('createFolder'); + const {id} = resourceState.rightClickedNode; + setResourceState((prevState) => ({ + ...prevState, + editOpen: true, + value: {id, fileName: '', description: ''}, + contextMenuOpen: false + })); + } + }; + const handleUpload = () => { + if (resourceState.rightClickedNode) { + uploadValue.pid = resourceState.rightClickedNode.id; + // todo: upload + setResourceState((prevState) => ({...prevState, uploadOpen: true, contextMenuOpen: false})); + } + }; - /** - * the node right click event OF upload, - */ - const handleCreateFolder = () => { - if (resourceState.rightClickedNode) { - setEditModal('createFolder'); - const { id } = resourceState.rightClickedNode; - setResourceState((prevState) => ({ - ...prevState, - editOpen: true, - value: { id, fileName: '', description: '' }, - contextMenuOpen: false - })); - } - }; - const handleUpload = () => { - if (resourceState.rightClickedNode) { - uploadValue.pid = resourceState.rightClickedNode.id; - // todo: upload - setResourceState((prevState) => ({ ...prevState, uploadOpen: true, contextMenuOpen: false })); - } - }; + /** + * the node right click event OF delete, + */ + const handleDelete = async () => { + if (resourceState.rightClickedNode) { + setResourceState((prevState) => ({...prevState, contextMenuOpen: false})); + await handleRemoveById(API_CONSTANTS.RESOURCE_REMOVE, resourceState.rightClickedNode.id); + await refreshTree(); + } + }; - /** - * the node right click event OF delete, - */ - const handleDelete = async () => { - if (resourceState.rightClickedNode) { - setResourceState((prevState) => ({ ...prevState, contextMenuOpen: false })); - await handleRemoveById(API_CONSTANTS.RESOURCE_REMOVE, resourceState.rightClickedNode.id); - await refreshTree(); - } - }; + /** + * the node right click event OF rename, + */ + const handleRename = () => { + if (resourceState.rightClickedNode) { + setEditModal('rename'); + const {id, name, desc} = resourceState.rightClickedNode; + setResourceState((prevState) => ({ + ...prevState, + editOpen: true, + value: {id, fileName: name, description: desc}, + contextMenuOpen: false + })); + } + }; - /** - * the node right click event OF rename, - */ - const handleRename = () => { - if (resourceState.rightClickedNode) { - setEditModal('rename'); - const { id, name, desc } = resourceState.rightClickedNode; - setResourceState((prevState) => ({ - ...prevState, - editOpen: true, - value: { id, fileName: name, description: desc }, - contextMenuOpen: false - })); - } - }; + 
const handleMenuClick = async (node: MenuInfo) => { + switch (node.key) { + case 'createFolder': + handleCreateFolder(); + break; + case 'upload': + handleUpload(); + break; + case 'delete': + await handleDelete(); + break; + case 'rename': + handleRename(); + break; + default: + break; + } + }; - const handleMenuClick = (node: MenuInfo) => { - switch (node.key) { - case 'createFolder': - handleCreateFolder(); - break; - case 'upload': - handleUpload(); - break; - case 'delete': - handleDelete(); - break; - case 'rename': - handleRename(); - break; - default: - break; - } - }; + /** + * the right click event + * @param info + */ + const handleRightClick = (info: any) => { + // 获取右键点击的节点信息 + const {node, event} = info; + console.log('node', node); + setResourceState((prevState) => ({ + ...prevState, + selectedKeys: [node.key], + rightClickedNode: node, + contextMenuOpen: true, + contextMenuPosition: { + ...prevState.contextMenuPosition, + left: event.clientX + 20, + top: event.clientY + 20 + } + })); + }; - /** - * the right click event - * @param info - */ - const handleRightClick = (info: any) => { - // 获取右键点击的节点信息 - const { node, event } = info; - console.log('node', node); - setResourceState((prevState) => ({ - ...prevState, - selectedKeys: [node.key], - rightClickedNode: node, - contextMenuOpen: true, - contextMenuPosition: { - ...prevState.contextMenuPosition, - left: event.clientX + 20, - top: event.clientY + 20 - } - })); - }; + /** + * the rename cancel + */ + const handleModalCancel = async () => { + setResourceState((prevState) => ({...prevState, editOpen: false})); + await refreshTree(); + }; - /** - * the rename cancel - */ - const handleModalCancel = () => { - setResourceState((prevState) => ({ ...prevState, editOpen: false })); - refreshTree(); - }; + /** + * the rename ok + */ + const handleModalSubmit = async (value: Partial) => { + const {id: pid} = resourceState.rightClickedNode; + if (editModal === 'createFolder') { + await handleOption(API_CONSTANTS.RESOURCE_CREATE_FOLDER, l('right.menu.createFolder'), { + ...value, + pid + }); + setResourceState((prevState) => ({...prevState, editOpen: false})); + } else if (editModal === 'rename') { + await handleOption(API_CONSTANTS.RESOURCE_RENAME, l('right.menu.rename'), {...value, pid}); + } + }; + const handleUploadCancel = async () => { + setResourceState((prevState) => ({...prevState, uploadOpen: false})); + await refreshTree(); + }; - /** - * the rename ok - */ - const handleModalSubmit = async (value: Partial) => { - const { id: pid } = resourceState.rightClickedNode; - if (editModal === 'createFolder') { - await handleOption(API_CONSTANTS.RESOURCE_CREATE_FOLDER, l('right.menu.createFolder'), { - ...value, - pid - }); - setResourceState((prevState) => ({ ...prevState, editOpen: false })); - } else if (editModal === 'rename') { - await handleOption(API_CONSTANTS.RESOURCE_RENAME, l('right.menu.rename'), { ...value, pid }); - } - }; - const handleUploadCancel = async () => { - setResourceState((prevState) => ({ ...prevState, uploadOpen: false })); - await refreshTree(); - }; + /** + * the content change + * @param value + */ + const handleContentChange = (value: any) => { + setResourceState((prevState) => ({...prevState, content: value})); + // todo: save content + }; - /** - * the content change - * @param value - */ - const handleContentChange = (value: any) => { - setResourceState((prevState) => ({ ...prevState, content: value })); - // todo: save content - }; + const access = useAccess(); - const access = useAccess(); + 
const renderRightMenu = () => { + if (!resourceState.rightClickedNode.isLeaf) { + return RIGHT_CONTEXT_FOLDER_MENU.filter( + (menu) => !menu.path || !!AuthorizedObject({path: menu.path, children: menu, access}) + ); + } + return RIGHT_CONTEXT_FILE_MENU.filter( + (menu) => !menu.path || !!AuthorizedObject({path: menu.path, children: menu, access}) + ); + }; - const renderRightMenu = () => { - if (!resourceState.rightClickedNode.isLeaf) { - return RIGHT_CONTEXT_FOLDER_MENU.filter( - (menu) => !!!menu.path || !!AuthorizedObject({ path: menu.path, children: menu, access }) - ); - } - return RIGHT_CONTEXT_FILE_MENU.filter( - (menu) => !!!menu.path || !!AuthorizedObject({ path: menu.path, children: menu, access }) + /** + * render + */ + return ( + <> + + + + handleNodeClick(info)} + /> + + setResourceState((prevState) => ({...prevState, contextMenuOpen: false})) + } + items={renderRightMenu()} + onClick={handleMenuClick} + /> + + + + + + + + {resourceState.editOpen && ( + + )} + {resourceState.uploadOpen && ( + + )} + ); - }; - - /** - * render - */ - return ( - <> - - - - handleNodeClick(info)} - /> - - setResourceState((prevState) => ({ ...prevState, contextMenuOpen: false })) - } - items={renderRightMenu()} - onClick={handleMenuClick} - /> - - - - - - - - {resourceState.editOpen && ( - - )} - {resourceState.uploadOpen && ( - - )} - - ); }; export default ResourceOverView; From 628cc539a033fcadd4d7aeab3ddf7cab2baf52a1 Mon Sep 17 00:00:00 2001 From: aiwenmo <32723967+aiwenmo@users.noreply.github.com> Date: Mon, 6 Nov 2023 21:58:59 +0800 Subject: [PATCH 13/21] [Feature-1930][dlink-client] Analyze custom functions from Flink SQL (#2502) * [Feature-1930][dlink-client] Analyze custom functions from Flink SQL * fix error --------- Co-authored-by: wenmo <32723967+wenmo@users.noreply.github.com> --- .../apache/flink/table/delegation/Parser.java | 17 ++++ .../table/planner/delegation/ParserImpl.java | 14 ++++ .../executor/CustomTableEnvironmentImpl.java | 2 +- .../java/org/dinky/utils/FunctionVisitor.java | 54 +++++++++++++ .../java/org/dinky/utils/LineageContext.java | 59 +++++++++++++- .../org/dinky/utils/LineageContextTest.java | 57 ++++++++----- .../org/dinky/utils/MySuffixFunction.java | 33 ++++++++ .../apache/flink/table/delegation/Parser.java | 17 ++++ .../table/planner/delegation/ParserImpl.java | 14 ++++ .../executor/CustomTableEnvironmentImpl.java | 2 +- .../java/org/dinky/utils/FunctionVisitor.java | 54 +++++++++++++ .../java/org/dinky/utils/LineageContext.java | 62 ++++++++++++++- .../org/dinky/utils/LineageContextTest.java | 57 ++++++++----- .../org/dinky/utils/MySuffixFunction.java | 33 ++++++++ .../executor/CustomTableEnvironmentImpl.java | 5 +- .../org/dinky/executor/ExtendedParser.java | 8 ++ .../org/dinky/executor/ParserWrapper.java | 16 ++++ .../java/org/dinky/utils/FunctionVisitor.java | 54 +++++++++++++ .../java/org/dinky/utils/LineageContext.java | 79 ++++++++++++++++++- .../org/dinky/utils/LineageContextTest.java | 66 ++++++++++------ .../org/dinky/utils/MySuffixFunction.java | 33 ++++++++ .../executor/CustomTableEnvironmentImpl.java | 5 +- .../org/dinky/executor/ExtendedParser.java | 8 ++ .../org/dinky/executor/ParserWrapper.java | 16 ++++ .../java/org/dinky/utils/FunctionVisitor.java | 54 +++++++++++++ .../java/org/dinky/utils/LineageContext.java | 68 +++++++++++++++- .../org/dinky/utils/LineageContextTest.java | 66 ++++++++++------ .../org/dinky/utils/MySuffixFunction.java | 33 ++++++++ .../org/dinky/data/model/FunctionResult.java | 52 ++++++++++++ 
.../java/org/dinky/data/model/LineageRel.java | 55 ++++++++----- .../java/org/dinky/executor/CustomParser.java | 17 ++++ .../org/dinky/parser/CustomParserImpl.java | 24 ++++++ 32 files changed, 1004 insertions(+), 130 deletions(-) create mode 100644 dinky-client/dinky-client-1.14/src/main/java/org/dinky/utils/FunctionVisitor.java create mode 100644 dinky-client/dinky-client-1.14/src/test/java/org/dinky/utils/MySuffixFunction.java create mode 100644 dinky-client/dinky-client-1.15/src/main/java/org/dinky/utils/FunctionVisitor.java create mode 100644 dinky-client/dinky-client-1.15/src/test/java/org/dinky/utils/MySuffixFunction.java create mode 100644 dinky-client/dinky-client-1.16/src/main/java/org/dinky/utils/FunctionVisitor.java create mode 100644 dinky-client/dinky-client-1.16/src/test/java/org/dinky/utils/MySuffixFunction.java create mode 100644 dinky-client/dinky-client-1.17/src/main/java/org/dinky/utils/FunctionVisitor.java create mode 100644 dinky-client/dinky-client-1.17/src/test/java/org/dinky/utils/MySuffixFunction.java create mode 100644 dinky-client/dinky-client-base/src/main/java/org/dinky/data/model/FunctionResult.java diff --git a/dinky-client/dinky-client-1.14/src/main/java/org/apache/flink/table/delegation/Parser.java b/dinky-client/dinky-client-1.14/src/main/java/org/apache/flink/table/delegation/Parser.java index 9222415033..ea3e9cd5b6 100644 --- a/dinky-client/dinky-client-1.14/src/main/java/org/apache/flink/table/delegation/Parser.java +++ b/dinky-client/dinky-client-1.14/src/main/java/org/apache/flink/table/delegation/Parser.java @@ -89,4 +89,21 @@ public interface Parser { * @throws SqlParserException if an exception is thrown when parsing the statement */ SqlNode parseExpression(String sqlExpression); + + /** + * Entry point for parsing SQL and return the abstract syntax tree + * + * @param statement the SQL statement to evaluate + * @return abstract syntax tree + * @throws org.apache.flink.table.api.SqlParserException when failed to parse the statement + */ + SqlNode parseSql(String statement); + + /** + * validate the query + * + * @param sqlNode SqlNode to execute on + * @return validated sqlNode + */ + SqlNode validate(SqlNode sqlNode); } diff --git a/dinky-client/dinky-client-1.14/src/main/java/org/apache/flink/table/planner/delegation/ParserImpl.java b/dinky-client/dinky-client-1.14/src/main/java/org/apache/flink/table/planner/delegation/ParserImpl.java index b8f94aef0c..6c724bc7ba 100644 --- a/dinky-client/dinky-client-1.14/src/main/java/org/apache/flink/table/planner/delegation/ParserImpl.java +++ b/dinky-client/dinky-client-1.14/src/main/java/org/apache/flink/table/planner/delegation/ParserImpl.java @@ -148,4 +148,18 @@ public CatalogManager getCatalogManager() { public SqlNode parseExpression(String sqlExpression) { return calciteParserSupplier.get().parseExpression(sqlExpression); } + + @Override + public SqlNode parseSql(String statement) { + CalciteParser parser = calciteParserSupplier.get(); + + // parse the sql query + return parser.parse(statement); + } + + @Override + public SqlNode validate(SqlNode sqlNode) { + FlinkPlannerImpl flinkPlanner = validatorSupplier.get(); + return flinkPlanner.validate(sqlNode); + } } diff --git a/dinky-client/dinky-client-1.14/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java b/dinky-client/dinky-client-1.14/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java index 022897a9ab..2c121d8425 100644 --- a/dinky-client/dinky-client-1.14/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java 
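The parseSql and validate hooks added to Parser/ParserImpl above are what the lineage code further down builds on: parseSql turns a single SQL statement into a Calcite SqlNode, validate resolves it against the catalog, and FunctionVisitor then walks the validated tree to collect every function call. A minimal sketch of that chain, assuming a ParserImpl obtained from the table environment as in the 1.14 code; the class and variable names here are illustrative only, not part of the patch:

import org.apache.calcite.sql.SqlNode;
import org.apache.flink.table.catalog.UnresolvedIdentifier;
import org.apache.flink.table.planner.delegation.ParserImpl;

import org.dinky.utils.FunctionVisitor;

import java.util.List;

public class ParserHookSketch {

    /** Parse, validate and collect every function referenced by a single statement. */
    public static List<UnresolvedIdentifier> collectFunctions(ParserImpl parser, String sql) {
        // 1. parse the statement into an abstract syntax tree
        SqlNode ast = parser.parseSql(sql);
        // 2. validate it so identifiers are resolved against the catalog
        SqlNode validated = parser.validate(ast);
        // 3. walk the validated tree and record each SqlFunction call
        FunctionVisitor visitor = new FunctionVisitor();
        validated.accept(visitor);
        return visitor.getFunctionList();
    }
}
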
+++ b/dinky-client/dinky-client-1.14/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java @@ -360,7 +360,7 @@ public void createTemporaryView(String path, DataStream dataStream, Strin @Override public List getLineage(String statement) { LineageContext lineageContext = new LineageContext((TableEnvironmentImpl) streamTableEnvironment); - return lineageContext.getLineage(statement); + return lineageContext.analyzeLineage(statement); } @Override diff --git a/dinky-client/dinky-client-1.14/src/main/java/org/dinky/utils/FunctionVisitor.java b/dinky-client/dinky-client-1.14/src/main/java/org/dinky/utils/FunctionVisitor.java new file mode 100644 index 0000000000..bdda641b0c --- /dev/null +++ b/dinky-client/dinky-client-1.14/src/main/java/org/dinky/utils/FunctionVisitor.java @@ -0,0 +1,54 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.utils; + +import org.apache.calcite.sql.SqlBasicCall; +import org.apache.calcite.sql.SqlCall; +import org.apache.calcite.sql.SqlFunction; +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.util.SqlBasicVisitor; +import org.apache.flink.table.catalog.UnresolvedIdentifier; + +import java.util.ArrayList; +import java.util.List; + +/** + * @description: FunctionVisitor + * @author: HamaWhite + */ +public class FunctionVisitor extends SqlBasicVisitor { + + private final List functionList = new ArrayList<>(); + + @Override + public Void visit(SqlCall call) { + if (call instanceof SqlBasicCall && call.getOperator() instanceof SqlFunction) { + SqlFunction function = (SqlFunction) call.getOperator(); + SqlIdentifier opName = function.getNameAsId(); + + functionList.add(UnresolvedIdentifier.of(opName.names)); + } + return super.visit(call); + } + + public List getFunctionList() { + return functionList; + } +} diff --git a/dinky-client/dinky-client-1.14/src/main/java/org/dinky/utils/LineageContext.java b/dinky-client/dinky-client-1.14/src/main/java/org/dinky/utils/LineageContext.java index 577c515bc7..daf07362b8 100644 --- a/dinky-client/dinky-client-1.14/src/main/java/org/dinky/utils/LineageContext.java +++ b/dinky-client/dinky-client-1.14/src/main/java/org/dinky/utils/LineageContext.java @@ -19,26 +19,36 @@ package org.dinky.utils; +import org.dinky.data.model.FunctionResult; import org.dinky.data.model.LineageRel; import org.apache.calcite.plan.RelOptTable; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.metadata.RelColumnOrigin; import org.apache.calcite.rel.metadata.RelMetadataQuery; +import org.apache.calcite.sql.SqlNode; import org.apache.commons.collections.CollectionUtils; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.table.api.TableException; import org.apache.flink.table.api.ValidationException; import 
org.apache.flink.table.api.internal.TableEnvironmentImpl; +import org.apache.flink.table.catalog.FunctionCatalog; +import org.apache.flink.table.catalog.UnresolvedIdentifier; import org.apache.flink.table.operations.CatalogSinkModifyOperation; import org.apache.flink.table.operations.Operation; +import org.apache.flink.table.planner.delegation.ParserImpl; +import org.apache.flink.table.planner.delegation.PlannerBase; import org.apache.flink.table.planner.operations.PlannerQueryOperation; import org.apache.flink.table.planner.plan.schema.TableSourceTable; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Set; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + /** * LineageContext * @@ -46,13 +56,15 @@ */ public class LineageContext { + private static final Logger LOG = LoggerFactory.getLogger(LineageContext.class); + private final TableEnvironmentImpl tableEnv; public LineageContext(TableEnvironmentImpl tableEnv) { this.tableEnv = tableEnv; } - public List getLineage(String statement) { + public List analyzeLineage(String statement) { // 1. Generate original relNode tree Tuple2 parsed = parseStatement(statement); String sinkTable = parsed.getField(0); @@ -130,4 +142,49 @@ private List buildFiledLineageResult(String sinkTable, RelNode optRe } return resultList; } + + /** + * Analyze custom functions from SQL, does not contain system functions. + * + * @param singleSql the SQL statement to analyze + * @return custom functions set + */ + public Set analyzeFunction(String singleSql) { + LOG.info("Analyze function Sql: \n {}", singleSql); + ParserImpl parser = (ParserImpl) tableEnv.getParser(); + + // parsing sql and return the abstract syntax tree + SqlNode sqlNode = parser.parseSql(singleSql); + + // validate the query + SqlNode validated = parser.validate(sqlNode); + + // look for all functions + FunctionVisitor visitor = new FunctionVisitor(); + validated.accept(visitor); + List fullFunctionList = visitor.getFunctionList(); + + // filter custom functions + Set resultSet = new HashSet<>(); + for (UnresolvedIdentifier unresolvedIdentifier : fullFunctionList) { + getFunctionCatalog() + .lookupFunction(unresolvedIdentifier) + // the objectIdentifier of the built-in function is null + .flatMap(e -> e.getFunctionIdentifier().getIdentifier()) + .ifPresent(identifier -> { + FunctionResult functionResult = new FunctionResult() + .setCatalogName(identifier.getCatalogName()) + .setDatabase(identifier.getDatabaseName()) + .setFunctionName(identifier.getObjectName()); + LOG.debug("analyzed function: {}", functionResult); + resultSet.add(functionResult); + }); + } + return resultSet; + } + + private FunctionCatalog getFunctionCatalog() { + PlannerBase planner = (PlannerBase) tableEnv.getPlanner(); + return planner.getFlinkContext().getFunctionCatalog(); + } } diff --git a/dinky-client/dinky-client-1.14/src/test/java/org/dinky/utils/LineageContextTest.java b/dinky-client/dinky-client-1.14/src/test/java/org/dinky/utils/LineageContextTest.java index fa22d6eec6..2c934de91a 100644 --- a/dinky-client/dinky-client-1.14/src/test/java/org/dinky/utils/LineageContextTest.java +++ b/dinky-client/dinky-client-1.14/src/test/java/org/dinky/utils/LineageContextTest.java @@ -21,6 +21,7 @@ import static org.junit.Assert.assertEquals; +import org.dinky.data.model.FunctionResult; import org.dinky.data.model.LineageRel; import org.apache.flink.configuration.Configuration; @@ -30,8 +31,7 @@ import org.apache.flink.table.api.internal.TableEnvironmentImpl; import 
java.util.List; -import java.util.stream.Collectors; -import java.util.stream.Stream; +import java.util.Set; import org.junit.Before; import org.junit.BeforeClass; @@ -43,7 +43,12 @@ */ public class LineageContextTest { + private static final String CATALOG_NAME = "default_catalog"; + + private static final String DEFAULT_DATABASE = "default_database"; + private static TableEnvironmentImpl tableEnv; + private static LineageContext context; @BeforeClass @@ -76,11 +81,14 @@ public void init() { + ") WITH ( " + " 'connector' = 'print' " + ")"); + // Create custom function my_suffix_udf + tableEnv.executeSql("DROP FUNCTION IF EXISTS my_suffix_udf"); + tableEnv.executeSql("CREATE FUNCTION IF NOT EXISTS my_suffix_udf " + "AS 'org.dinky.utils.MySuffixFunction'"); } @Test - public void testGetLineage() { - List actualList = context.getLineage("INSERT INTO TT select a||c A ,b||c B from ST"); + public void testAnalyzeLineage() { + String sql = "INSERT INTO TT SELECT a||c A ,b||c B FROM ST"; String[][] expectedArray = { {"ST", "a", "TT", "A", "||(a, c)"}, {"ST", "c", "TT", "A", "||(a, c)"}, @@ -88,25 +96,32 @@ public void testGetLineage() { {"ST", "c", "TT", "B", "||(b, c)"} }; - List expectedList = buildResult(expectedArray); + analyzeLineage(sql, expectedArray); + } + + @Test + public void testAnalyzeLineageAndFunction() { + String sql = "INSERT INTO TT SELECT LOWER(a) , my_suffix_udf(b) FROM ST"; + + String[][] expectedArray = { + {"ST", "a", "TT", "A", "LOWER(a)"}, + {"ST", "b", "TT", "B", "my_suffix_udf(b)"} + }; + + analyzeLineage(sql, expectedArray); + + analyzeFunction(sql, new String[] {"my_suffix_udf"}); + } + + private void analyzeLineage(String sql, String[][] expectedArray) { + List actualList = context.analyzeLineage(sql); + List expectedList = LineageRel.build(CATALOG_NAME, DEFAULT_DATABASE, expectedArray); assertEquals(expectedList, actualList); } - private List buildResult(String[][] expectedArray) { - return Stream.of(expectedArray) - .map(e -> { - String transform = e.length == 5 ? e[4] : null; - return new LineageRel( - "default_catalog", - "default_database", - e[0], - e[1], - "default_catalog", - "default_database", - e[2], - e[3], - transform); - }) - .collect(Collectors.toList()); + private void analyzeFunction(String sql, String[] expectedArray) { + Set actualSet = context.analyzeFunction(sql); + Set expectedSet = FunctionResult.build(CATALOG_NAME, DEFAULT_DATABASE, expectedArray); + assertEquals(expectedSet, actualSet); } } diff --git a/dinky-client/dinky-client-1.14/src/test/java/org/dinky/utils/MySuffixFunction.java b/dinky-client/dinky-client-1.14/src/test/java/org/dinky/utils/MySuffixFunction.java new file mode 100644 index 0000000000..ba2c5ac73e --- /dev/null +++ b/dinky-client/dinky-client-1.14/src/test/java/org/dinky/utils/MySuffixFunction.java @@ -0,0 +1,33 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.utils; + +import org.apache.flink.table.functions.ScalarFunction; + +/** + * @description: MySuffixFunction + * @author: HamaWhite + */ +public class MySuffixFunction extends ScalarFunction { + + public String eval(String input) { + return input.concat("-HamaWhite"); + } +} diff --git a/dinky-client/dinky-client-1.15/src/main/java/org/apache/flink/table/delegation/Parser.java b/dinky-client/dinky-client-1.15/src/main/java/org/apache/flink/table/delegation/Parser.java index 9222415033..ea3e9cd5b6 100644 --- a/dinky-client/dinky-client-1.15/src/main/java/org/apache/flink/table/delegation/Parser.java +++ b/dinky-client/dinky-client-1.15/src/main/java/org/apache/flink/table/delegation/Parser.java @@ -89,4 +89,21 @@ public interface Parser { * @throws SqlParserException if an exception is thrown when parsing the statement */ SqlNode parseExpression(String sqlExpression); + + /** + * Entry point for parsing SQL and return the abstract syntax tree + * + * @param statement the SQL statement to evaluate + * @return abstract syntax tree + * @throws org.apache.flink.table.api.SqlParserException when failed to parse the statement + */ + SqlNode parseSql(String statement); + + /** + * validate the query + * + * @param sqlNode SqlNode to execute on + * @return validated sqlNode + */ + SqlNode validate(SqlNode sqlNode); } diff --git a/dinky-client/dinky-client-1.15/src/main/java/org/apache/flink/table/planner/delegation/ParserImpl.java b/dinky-client/dinky-client-1.15/src/main/java/org/apache/flink/table/planner/delegation/ParserImpl.java index ff0f878ec7..f6998e38b3 100644 --- a/dinky-client/dinky-client-1.15/src/main/java/org/apache/flink/table/planner/delegation/ParserImpl.java +++ b/dinky-client/dinky-client-1.15/src/main/java/org/apache/flink/table/planner/delegation/ParserImpl.java @@ -151,4 +151,18 @@ public CatalogManager getCatalogManager() { public SqlNode parseExpression(String sqlExpression) { return calciteParserSupplier.get().parseExpression(sqlExpression); } + + @Override + public SqlNode parseSql(String statement) { + CalciteParser parser = calciteParserSupplier.get(); + + // parse the sql query + return parser.parse(statement); + } + + @Override + public SqlNode validate(SqlNode sqlNode) { + FlinkPlannerImpl flinkPlanner = validatorSupplier.get(); + return flinkPlanner.validate(sqlNode); + } } diff --git a/dinky-client/dinky-client-1.15/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java b/dinky-client/dinky-client-1.15/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java index a3320dd770..b02ea33f0d 100644 --- a/dinky-client/dinky-client-1.15/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java +++ b/dinky-client/dinky-client-1.15/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java @@ -319,7 +319,7 @@ public void createTemporaryView(String path, DataStream dataStream, Strin @Override public List getLineage(String statement) { LineageContext lineageContext = new LineageContext((TableEnvironmentImpl) streamTableEnvironment); - return lineageContext.getLineage(statement); + return lineageContext.analyzeLineage(statement); } @Override diff --git a/dinky-client/dinky-client-1.15/src/main/java/org/dinky/utils/FunctionVisitor.java b/dinky-client/dinky-client-1.15/src/main/java/org/dinky/utils/FunctionVisitor.java new file mode 100644 index 0000000000..bdda641b0c --- /dev/null +++ 
b/dinky-client/dinky-client-1.15/src/main/java/org/dinky/utils/FunctionVisitor.java @@ -0,0 +1,54 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.utils; + +import org.apache.calcite.sql.SqlBasicCall; +import org.apache.calcite.sql.SqlCall; +import org.apache.calcite.sql.SqlFunction; +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.util.SqlBasicVisitor; +import org.apache.flink.table.catalog.UnresolvedIdentifier; + +import java.util.ArrayList; +import java.util.List; + +/** + * @description: FunctionVisitor + * @author: HamaWhite + */ +public class FunctionVisitor extends SqlBasicVisitor { + + private final List functionList = new ArrayList<>(); + + @Override + public Void visit(SqlCall call) { + if (call instanceof SqlBasicCall && call.getOperator() instanceof SqlFunction) { + SqlFunction function = (SqlFunction) call.getOperator(); + SqlIdentifier opName = function.getNameAsId(); + + functionList.add(UnresolvedIdentifier.of(opName.names)); + } + return super.visit(call); + } + + public List getFunctionList() { + return functionList; + } +} diff --git a/dinky-client/dinky-client-1.15/src/main/java/org/dinky/utils/LineageContext.java b/dinky-client/dinky-client-1.15/src/main/java/org/dinky/utils/LineageContext.java index d707ade42a..059cbf0d71 100644 --- a/dinky-client/dinky-client-1.15/src/main/java/org/dinky/utils/LineageContext.java +++ b/dinky-client/dinky-client-1.15/src/main/java/org/dinky/utils/LineageContext.java @@ -19,26 +19,38 @@ package org.dinky.utils; +import org.dinky.data.model.FunctionResult; import org.dinky.data.model.LineageRel; import org.apache.calcite.plan.RelOptTable; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.metadata.RelColumnOrigin; import org.apache.calcite.rel.metadata.RelMetadataQuery; +import org.apache.calcite.sql.SqlNode; import org.apache.commons.collections.CollectionUtils; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.table.api.TableException; import org.apache.flink.table.api.ValidationException; import org.apache.flink.table.api.internal.TableEnvironmentImpl; +import org.apache.flink.table.catalog.ContextResolvedFunction; +import org.apache.flink.table.catalog.FunctionCatalog; +import org.apache.flink.table.catalog.UnresolvedIdentifier; +import org.apache.flink.table.functions.FunctionIdentifier; import org.apache.flink.table.operations.Operation; import org.apache.flink.table.operations.SinkModifyOperation; +import org.apache.flink.table.planner.delegation.ParserImpl; +import org.apache.flink.table.planner.delegation.PlannerBase; import org.apache.flink.table.planner.operations.PlannerQueryOperation; import org.apache.flink.table.planner.plan.schema.TableSourceTable; import java.util.ArrayList; 
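The filtering step in analyzeFunction for 1.15 (below) relies on one detail of Flink's function catalog: a catalog (user-defined) function resolves to a FunctionIdentifier that wraps an ObjectIdentifier, while a built-in function carries only a simple name, so its ObjectIdentifier is absent and the Optional chain drops it. A hedged sketch of that filter in isolation, assuming a FunctionCatalog and the identifier list produced by FunctionVisitor; the class name is illustrative:

import org.apache.flink.table.catalog.ContextResolvedFunction;
import org.apache.flink.table.catalog.FunctionCatalog;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.UnresolvedIdentifier;
import org.apache.flink.table.functions.FunctionIdentifier;

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class CustomFunctionFilterSketch {

    /** Keep catalog (user-defined) functions, drop built-ins such as LOWER. */
    public static Set<ObjectIdentifier> filterCustomFunctions(
            FunctionCatalog functionCatalog, List<UnresolvedIdentifier> candidates) {
        Set<ObjectIdentifier> result = new HashSet<>();
        for (UnresolvedIdentifier candidate : candidates) {
            functionCatalog
                    .lookupFunction(candidate)                        // Optional<ContextResolvedFunction>
                    .flatMap(ContextResolvedFunction::getIdentifier)  // Optional<FunctionIdentifier>
                    .flatMap(FunctionIdentifier::getIdentifier)       // empty for built-in functions
                    .ifPresent(result::add);                          // only catalog functions survive
        }
        return result;
    }
}
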
+import java.util.HashSet; import java.util.List; import java.util.Set; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + /** * LineageContext * @@ -46,13 +58,15 @@ */ public class LineageContext { + private static final Logger LOG = LoggerFactory.getLogger(LineageContext.class); + private final TableEnvironmentImpl tableEnv; public LineageContext(TableEnvironmentImpl tableEnv) { this.tableEnv = tableEnv; } - public List getLineage(String statement) { + public List analyzeLineage(String statement) { // 1. Generate original relNode tree Tuple2 parsed = parseStatement(statement); String sinkTable = parsed.getField(0); @@ -131,4 +145,50 @@ private List buildFiledLineageResult(String sinkTable, RelNode optRe } return resultList; } + + /** + * Analyze custom functions from SQL, does not contain system functions. + * + * @param singleSql the SQL statement to analyze + * @return custom functions set + */ + public Set analyzeFunction(String singleSql) { + LOG.info("Analyze function Sql: \n {}", singleSql); + ParserImpl parser = (ParserImpl) tableEnv.getParser(); + + // parsing sql and return the abstract syntax tree + SqlNode sqlNode = parser.parseSql(singleSql); + + // validate the query + SqlNode validated = parser.validate(sqlNode); + + // look for all functions + FunctionVisitor visitor = new FunctionVisitor(); + validated.accept(visitor); + List fullFunctionList = visitor.getFunctionList(); + + // filter custom functions + Set resultSet = new HashSet<>(); + for (UnresolvedIdentifier unresolvedIdentifier : fullFunctionList) { + getFunctionCatalog() + .lookupFunction(unresolvedIdentifier) + .flatMap(ContextResolvedFunction::getIdentifier) + // the objectIdentifier of the built-in function is null + .flatMap(FunctionIdentifier::getIdentifier) + .ifPresent(identifier -> { + FunctionResult functionResult = new FunctionResult() + .setCatalogName(identifier.getCatalogName()) + .setDatabase(identifier.getDatabaseName()) + .setFunctionName(identifier.getObjectName()); + LOG.debug("analyzed function: {}", functionResult); + resultSet.add(functionResult); + }); + } + return resultSet; + } + + private FunctionCatalog getFunctionCatalog() { + PlannerBase planner = (PlannerBase) tableEnv.getPlanner(); + return planner.getFlinkContext().getFunctionCatalog(); + } } diff --git a/dinky-client/dinky-client-1.15/src/test/java/org/dinky/utils/LineageContextTest.java b/dinky-client/dinky-client-1.15/src/test/java/org/dinky/utils/LineageContextTest.java index fa22d6eec6..2c934de91a 100644 --- a/dinky-client/dinky-client-1.15/src/test/java/org/dinky/utils/LineageContextTest.java +++ b/dinky-client/dinky-client-1.15/src/test/java/org/dinky/utils/LineageContextTest.java @@ -21,6 +21,7 @@ import static org.junit.Assert.assertEquals; +import org.dinky.data.model.FunctionResult; import org.dinky.data.model.LineageRel; import org.apache.flink.configuration.Configuration; @@ -30,8 +31,7 @@ import org.apache.flink.table.api.internal.TableEnvironmentImpl; import java.util.List; -import java.util.stream.Collectors; -import java.util.stream.Stream; +import java.util.Set; import org.junit.Before; import org.junit.BeforeClass; @@ -43,7 +43,12 @@ */ public class LineageContextTest { + private static final String CATALOG_NAME = "default_catalog"; + + private static final String DEFAULT_DATABASE = "default_database"; + private static TableEnvironmentImpl tableEnv; + private static LineageContext context; @BeforeClass @@ -76,11 +81,14 @@ public void init() { + ") WITH ( " + " 'connector' = 'print' " + ")"); + // Create 
custom function my_suffix_udf + tableEnv.executeSql("DROP FUNCTION IF EXISTS my_suffix_udf"); + tableEnv.executeSql("CREATE FUNCTION IF NOT EXISTS my_suffix_udf " + "AS 'org.dinky.utils.MySuffixFunction'"); } @Test - public void testGetLineage() { - List actualList = context.getLineage("INSERT INTO TT select a||c A ,b||c B from ST"); + public void testAnalyzeLineage() { + String sql = "INSERT INTO TT SELECT a||c A ,b||c B FROM ST"; String[][] expectedArray = { {"ST", "a", "TT", "A", "||(a, c)"}, {"ST", "c", "TT", "A", "||(a, c)"}, @@ -88,25 +96,32 @@ public void testGetLineage() { {"ST", "c", "TT", "B", "||(b, c)"} }; - List expectedList = buildResult(expectedArray); + analyzeLineage(sql, expectedArray); + } + + @Test + public void testAnalyzeLineageAndFunction() { + String sql = "INSERT INTO TT SELECT LOWER(a) , my_suffix_udf(b) FROM ST"; + + String[][] expectedArray = { + {"ST", "a", "TT", "A", "LOWER(a)"}, + {"ST", "b", "TT", "B", "my_suffix_udf(b)"} + }; + + analyzeLineage(sql, expectedArray); + + analyzeFunction(sql, new String[] {"my_suffix_udf"}); + } + + private void analyzeLineage(String sql, String[][] expectedArray) { + List actualList = context.analyzeLineage(sql); + List expectedList = LineageRel.build(CATALOG_NAME, DEFAULT_DATABASE, expectedArray); assertEquals(expectedList, actualList); } - private List buildResult(String[][] expectedArray) { - return Stream.of(expectedArray) - .map(e -> { - String transform = e.length == 5 ? e[4] : null; - return new LineageRel( - "default_catalog", - "default_database", - e[0], - e[1], - "default_catalog", - "default_database", - e[2], - e[3], - transform); - }) - .collect(Collectors.toList()); + private void analyzeFunction(String sql, String[] expectedArray) { + Set actualSet = context.analyzeFunction(sql); + Set expectedSet = FunctionResult.build(CATALOG_NAME, DEFAULT_DATABASE, expectedArray); + assertEquals(expectedSet, actualSet); } } diff --git a/dinky-client/dinky-client-1.15/src/test/java/org/dinky/utils/MySuffixFunction.java b/dinky-client/dinky-client-1.15/src/test/java/org/dinky/utils/MySuffixFunction.java new file mode 100644 index 0000000000..ba2c5ac73e --- /dev/null +++ b/dinky-client/dinky-client-1.15/src/test/java/org/dinky/utils/MySuffixFunction.java @@ -0,0 +1,33 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.dinky.utils; + +import org.apache.flink.table.functions.ScalarFunction; + +/** + * @description: MySuffixFunction + * @author: HamaWhite + */ +public class MySuffixFunction extends ScalarFunction { + + public String eval(String input) { + return input.concat("-HamaWhite"); + } +} diff --git a/dinky-client/dinky-client-1.16/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java b/dinky-client/dinky-client-1.16/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java index d8547b223d..bffc299814 100644 --- a/dinky-client/dinky-client-1.16/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java +++ b/dinky-client/dinky-client-1.16/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java @@ -39,7 +39,6 @@ import org.apache.flink.table.api.ExplainDetail; import org.apache.flink.table.api.TableException; import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; -import org.apache.flink.table.api.internal.TableEnvironmentImpl; import org.apache.flink.table.operations.CreateTableASOperation; import org.apache.flink.table.operations.ExplainOperation; import org.apache.flink.table.operations.ModifyOperation; @@ -255,8 +254,8 @@ private void setConfiguration(StreamExecutionEnvironment environment, Map getLineage(String statement) { - LineageContext lineageContext = new LineageContext((TableEnvironmentImpl) streamTableEnvironment); - return lineageContext.getLineage(statement); + LineageContext lineageContext = new LineageContext(this); + return lineageContext.analyzeLineage(statement); } @Override diff --git a/dinky-client/dinky-client-1.16/src/main/java/org/dinky/executor/ExtendedParser.java b/dinky-client/dinky-client-1.16/src/main/java/org/dinky/executor/ExtendedParser.java index 42ffd70783..e19ab6baa8 100644 --- a/dinky-client/dinky-client-1.16/src/main/java/org/dinky/executor/ExtendedParser.java +++ b/dinky-client/dinky-client-1.16/src/main/java/org/dinky/executor/ExtendedParser.java @@ -19,9 +19,17 @@ package org.dinky.executor; +import org.apache.calcite.sql.SqlNode; import org.apache.flink.table.delegation.Parser; /** */ public interface ExtendedParser extends Parser { + + SqlNode parseExpression(String sqlExpression); + + SqlNode parseSql(String statement); + + SqlNode validate(SqlNode sqlNode); + CustomParser getCustomParser(); } diff --git a/dinky-client/dinky-client-1.16/src/main/java/org/dinky/executor/ParserWrapper.java b/dinky-client/dinky-client-1.16/src/main/java/org/dinky/executor/ParserWrapper.java index 922ebf62ea..0f76daee30 100644 --- a/dinky-client/dinky-client-1.16/src/main/java/org/dinky/executor/ParserWrapper.java +++ b/dinky-client/dinky-client-1.16/src/main/java/org/dinky/executor/ParserWrapper.java @@ -19,6 +19,7 @@ package org.dinky.executor; +import org.apache.calcite.sql.SqlNode; import org.apache.flink.table.catalog.UnresolvedIdentifier; import org.apache.flink.table.expressions.ResolvedExpression; import org.apache.flink.table.operations.Operation; @@ -63,6 +64,21 @@ public String[] getCompletionHints(String statement, int position) { return customParser.getParser().getCompletionHints(statement, position); } + @Override + public SqlNode parseExpression(String sqlExpression) { + return customParser.parseExpression(sqlExpression); + } + + @Override + public SqlNode parseSql(String statement) { + return customParser.parseSql(statement); + } + + @Override + public SqlNode validate(SqlNode sqlNode) { + return customParser.validate(sqlNode); + } + @Override public CustomParser getCustomParser() { return 
customParser; diff --git a/dinky-client/dinky-client-1.16/src/main/java/org/dinky/utils/FunctionVisitor.java b/dinky-client/dinky-client-1.16/src/main/java/org/dinky/utils/FunctionVisitor.java new file mode 100644 index 0000000000..bdda641b0c --- /dev/null +++ b/dinky-client/dinky-client-1.16/src/main/java/org/dinky/utils/FunctionVisitor.java @@ -0,0 +1,54 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.utils; + +import org.apache.calcite.sql.SqlBasicCall; +import org.apache.calcite.sql.SqlCall; +import org.apache.calcite.sql.SqlFunction; +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.util.SqlBasicVisitor; +import org.apache.flink.table.catalog.UnresolvedIdentifier; + +import java.util.ArrayList; +import java.util.List; + +/** + * @description: FunctionVisitor + * @author: HamaWhite + */ +public class FunctionVisitor extends SqlBasicVisitor { + + private final List functionList = new ArrayList<>(); + + @Override + public Void visit(SqlCall call) { + if (call instanceof SqlBasicCall && call.getOperator() instanceof SqlFunction) { + SqlFunction function = (SqlFunction) call.getOperator(); + SqlIdentifier opName = function.getNameAsId(); + + functionList.add(UnresolvedIdentifier.of(opName.names)); + } + return super.visit(call); + } + + public List getFunctionList() { + return functionList; + } +} diff --git a/dinky-client/dinky-client-1.16/src/main/java/org/dinky/utils/LineageContext.java b/dinky-client/dinky-client-1.16/src/main/java/org/dinky/utils/LineageContext.java index d707ade42a..a29c8ea144 100644 --- a/dinky-client/dinky-client-1.16/src/main/java/org/dinky/utils/LineageContext.java +++ b/dinky-client/dinky-client-1.16/src/main/java/org/dinky/utils/LineageContext.java @@ -19,26 +19,40 @@ package org.dinky.utils; +import org.dinky.context.CustomTableEnvironmentContext; +import org.dinky.data.model.FunctionResult; import org.dinky.data.model.LineageRel; +import org.dinky.executor.CustomParser; +import org.dinky.executor.CustomTableEnvironment; +import org.dinky.executor.ExtendedParser; import org.apache.calcite.plan.RelOptTable; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.metadata.RelColumnOrigin; import org.apache.calcite.rel.metadata.RelMetadataQuery; +import org.apache.calcite.sql.SqlNode; import org.apache.commons.collections.CollectionUtils; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.table.api.TableException; import org.apache.flink.table.api.ValidationException; -import org.apache.flink.table.api.internal.TableEnvironmentImpl; +import org.apache.flink.table.catalog.ContextResolvedFunction; +import org.apache.flink.table.catalog.FunctionCatalog; +import org.apache.flink.table.catalog.UnresolvedIdentifier; +import 
org.apache.flink.table.functions.FunctionIdentifier; import org.apache.flink.table.operations.Operation; import org.apache.flink.table.operations.SinkModifyOperation; +import org.apache.flink.table.planner.delegation.PlannerBase; import org.apache.flink.table.planner.operations.PlannerQueryOperation; import org.apache.flink.table.planner.plan.schema.TableSourceTable; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Set; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + /** * LineageContext * @@ -46,13 +60,15 @@ */ public class LineageContext { - private final TableEnvironmentImpl tableEnv; + private static final Logger LOG = LoggerFactory.getLogger(LineageContext.class); + + private final CustomTableEnvironment tableEnv; - public LineageContext(TableEnvironmentImpl tableEnv) { + public LineageContext(CustomTableEnvironment tableEnv) { this.tableEnv = tableEnv; } - public List getLineage(String statement) { + public List analyzeLineage(String statement) { // 1. Generate original relNode tree Tuple2 parsed = parseStatement(statement); String sinkTable = parsed.getField(0); @@ -131,4 +147,59 @@ private List buildFiledLineageResult(String sinkTable, RelNode optRe } return resultList; } + + /** + * Analyze custom functions from SQL, does not contain system functions. + * + * @param singleSql the SQL statement to analyze + * @return custom functions set + */ + public Set analyzeFunction(String singleSql) { + LOG.info("Analyze function Sql: \n {}", singleSql); + CustomParser parser = null; + // ((ExtendedParser) tableEnv.getParser()).getCustomParser(); + + if (CustomTableEnvironmentContext.get().getParser() instanceof ExtendedParser) { + ExtendedParser extendedParser = + (ExtendedParser) CustomTableEnvironmentContext.get().getParser(); + parser = extendedParser.getCustomParser(); + } else { + throw new RuntimeException("CustomParser is not set"); + } + + // parsing sql and return the abstract syntax tree + SqlNode sqlNode = parser.parseSql(singleSql); + + // validate the query + SqlNode validated = parser.validate(sqlNode); + + // look for all functions + FunctionVisitor visitor = new FunctionVisitor(); + validated.accept(visitor); + List fullFunctionList = visitor.getFunctionList(); + + // filter custom functions + Set resultSet = new HashSet<>(); + for (UnresolvedIdentifier unresolvedIdentifier : fullFunctionList) { + getFunctionCatalog() + .lookupFunction(unresolvedIdentifier) + .flatMap(ContextResolvedFunction::getIdentifier) + // the objectIdentifier of the built-in function is null + .flatMap(FunctionIdentifier::getIdentifier) + .ifPresent(identifier -> { + FunctionResult functionResult = new FunctionResult() + .setCatalogName(identifier.getCatalogName()) + .setDatabase(identifier.getDatabaseName()) + .setFunctionName(identifier.getObjectName()); + LOG.debug("analyzed function: {}", functionResult); + resultSet.add(functionResult); + }); + } + return resultSet; + } + + private FunctionCatalog getFunctionCatalog() { + PlannerBase planner = (PlannerBase) tableEnv.getPlanner(); + return planner.getFlinkContext().getFunctionCatalog(); + } } diff --git a/dinky-client/dinky-client-1.16/src/test/java/org/dinky/utils/LineageContextTest.java b/dinky-client/dinky-client-1.16/src/test/java/org/dinky/utils/LineageContextTest.java index fa22d6eec6..d14c1f6b9d 100644 --- a/dinky-client/dinky-client-1.16/src/test/java/org/dinky/utils/LineageContextTest.java +++ b/dinky-client/dinky-client-1.16/src/test/java/org/dinky/utils/LineageContextTest.java @@ 
-21,20 +21,20 @@ import static org.junit.Assert.assertEquals; +import org.dinky.data.model.FunctionResult; import org.dinky.data.model.LineageRel; +import org.dinky.executor.CustomTableEnvironmentImpl; import org.apache.flink.configuration.Configuration; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.table.api.EnvironmentSettings; -import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; -import org.apache.flink.table.api.internal.TableEnvironmentImpl; import java.util.List; -import java.util.stream.Collectors; -import java.util.stream.Stream; +import java.util.Set; import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; /** @@ -43,7 +43,12 @@ */ public class LineageContextTest { - private static TableEnvironmentImpl tableEnv; + private static final String CATALOG_NAME = "default_catalog"; + + private static final String DEFAULT_DATABASE = "default_database"; + + private static CustomTableEnvironmentImpl tableEnv; + private static LineageContext context; @BeforeClass @@ -52,7 +57,7 @@ public static void setUp() { EnvironmentSettings settings = EnvironmentSettings.newInstance().inStreamingMode().build(); - tableEnv = (TableEnvironmentImpl) StreamTableEnvironment.create(env, settings); + tableEnv = CustomTableEnvironmentImpl.create(env, settings); context = new LineageContext(tableEnv); } @@ -76,11 +81,14 @@ public void init() { + ") WITH ( " + " 'connector' = 'print' " + ")"); + // Create custom function my_suffix_udf + tableEnv.executeSql("DROP FUNCTION IF EXISTS my_suffix_udf"); + tableEnv.executeSql("CREATE FUNCTION IF NOT EXISTS my_suffix_udf " + "AS 'org.dinky.utils.MySuffixFunction'"); } @Test - public void testGetLineage() { - List actualList = context.getLineage("INSERT INTO TT select a||c A ,b||c B from ST"); + public void testAnalyzeLineage() { + String sql = "INSERT INTO TT SELECT a||c A ,b||c B FROM ST"; String[][] expectedArray = { {"ST", "a", "TT", "A", "||(a, c)"}, {"ST", "c", "TT", "A", "||(a, c)"}, @@ -88,25 +96,33 @@ public void testGetLineage() { {"ST", "c", "TT", "B", "||(b, c)"} }; - List expectedList = buildResult(expectedArray); + analyzeLineage(sql, expectedArray); + } + + @Ignore + @Test + public void testAnalyzeLineageAndFunction() { + String sql = "INSERT INTO TT SELECT LOWER(a) , my_suffix_udf(b) FROM ST"; + + String[][] expectedArray = { + {"ST", "a", "TT", "A", "LOWER(a)"}, + {"ST", "b", "TT", "B", "my_suffix_udf(b)"} + }; + + analyzeLineage(sql, expectedArray); + + analyzeFunction(sql, new String[] {"my_suffix_udf"}); + } + + private void analyzeLineage(String sql, String[][] expectedArray) { + List actualList = context.analyzeLineage(sql); + List expectedList = LineageRel.build(CATALOG_NAME, DEFAULT_DATABASE, expectedArray); assertEquals(expectedList, actualList); } - private List buildResult(String[][] expectedArray) { - return Stream.of(expectedArray) - .map(e -> { - String transform = e.length == 5 ? 
e[4] : null; - return new LineageRel( - "default_catalog", - "default_database", - e[0], - e[1], - "default_catalog", - "default_database", - e[2], - e[3], - transform); - }) - .collect(Collectors.toList()); + private void analyzeFunction(String sql, String[] expectedArray) { + Set actualSet = context.analyzeFunction(sql); + Set expectedSet = FunctionResult.build(CATALOG_NAME, DEFAULT_DATABASE, expectedArray); + assertEquals(expectedSet, actualSet); } } diff --git a/dinky-client/dinky-client-1.16/src/test/java/org/dinky/utils/MySuffixFunction.java b/dinky-client/dinky-client-1.16/src/test/java/org/dinky/utils/MySuffixFunction.java new file mode 100644 index 0000000000..ba2c5ac73e --- /dev/null +++ b/dinky-client/dinky-client-1.16/src/test/java/org/dinky/utils/MySuffixFunction.java @@ -0,0 +1,33 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.utils; + +import org.apache.flink.table.functions.ScalarFunction; + +/** + * @description: MySuffixFunction + * @author: HamaWhite + */ +public class MySuffixFunction extends ScalarFunction { + + public String eval(String input) { + return input.concat("-HamaWhite"); + } +} diff --git a/dinky-client/dinky-client-1.17/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java b/dinky-client/dinky-client-1.17/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java index 6cc8e992ba..80723a5a00 100644 --- a/dinky-client/dinky-client-1.17/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java +++ b/dinky-client/dinky-client-1.17/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java @@ -40,7 +40,6 @@ import org.apache.flink.table.api.ExplainFormat; import org.apache.flink.table.api.TableException; import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; -import org.apache.flink.table.api.internal.TableEnvironmentImpl; import org.apache.flink.table.operations.CreateTableASOperation; import org.apache.flink.table.operations.ExplainOperation; import org.apache.flink.table.operations.ModifyOperation; @@ -251,8 +250,8 @@ private void setConfiguration(StreamExecutionEnvironment environment, Map getLineage(String statement) { - LineageContext lineageContext = new LineageContext((TableEnvironmentImpl) streamTableEnvironment); - return lineageContext.getLineage(statement); + LineageContext lineageContext = new LineageContext(this); + return lineageContext.analyzeLineage(statement); } @Override diff --git a/dinky-client/dinky-client-1.17/src/main/java/org/dinky/executor/ExtendedParser.java b/dinky-client/dinky-client-1.17/src/main/java/org/dinky/executor/ExtendedParser.java index 42ffd70783..e19ab6baa8 100644 --- a/dinky-client/dinky-client-1.17/src/main/java/org/dinky/executor/ExtendedParser.java +++ 
b/dinky-client/dinky-client-1.17/src/main/java/org/dinky/executor/ExtendedParser.java @@ -19,9 +19,17 @@ package org.dinky.executor; +import org.apache.calcite.sql.SqlNode; import org.apache.flink.table.delegation.Parser; /** */ public interface ExtendedParser extends Parser { + + SqlNode parseExpression(String sqlExpression); + + SqlNode parseSql(String statement); + + SqlNode validate(SqlNode sqlNode); + CustomParser getCustomParser(); } diff --git a/dinky-client/dinky-client-1.17/src/main/java/org/dinky/executor/ParserWrapper.java b/dinky-client/dinky-client-1.17/src/main/java/org/dinky/executor/ParserWrapper.java index 922ebf62ea..0f76daee30 100644 --- a/dinky-client/dinky-client-1.17/src/main/java/org/dinky/executor/ParserWrapper.java +++ b/dinky-client/dinky-client-1.17/src/main/java/org/dinky/executor/ParserWrapper.java @@ -19,6 +19,7 @@ package org.dinky.executor; +import org.apache.calcite.sql.SqlNode; import org.apache.flink.table.catalog.UnresolvedIdentifier; import org.apache.flink.table.expressions.ResolvedExpression; import org.apache.flink.table.operations.Operation; @@ -63,6 +64,21 @@ public String[] getCompletionHints(String statement, int position) { return customParser.getParser().getCompletionHints(statement, position); } + @Override + public SqlNode parseExpression(String sqlExpression) { + return customParser.parseExpression(sqlExpression); + } + + @Override + public SqlNode parseSql(String statement) { + return customParser.parseSql(statement); + } + + @Override + public SqlNode validate(SqlNode sqlNode) { + return customParser.validate(sqlNode); + } + @Override public CustomParser getCustomParser() { return customParser; diff --git a/dinky-client/dinky-client-1.17/src/main/java/org/dinky/utils/FunctionVisitor.java b/dinky-client/dinky-client-1.17/src/main/java/org/dinky/utils/FunctionVisitor.java new file mode 100644 index 0000000000..bdda641b0c --- /dev/null +++ b/dinky-client/dinky-client-1.17/src/main/java/org/dinky/utils/FunctionVisitor.java @@ -0,0 +1,54 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.dinky.utils; + +import org.apache.calcite.sql.SqlBasicCall; +import org.apache.calcite.sql.SqlCall; +import org.apache.calcite.sql.SqlFunction; +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.util.SqlBasicVisitor; +import org.apache.flink.table.catalog.UnresolvedIdentifier; + +import java.util.ArrayList; +import java.util.List; + +/** + * @description: FunctionVisitor + * @author: HamaWhite + */ +public class FunctionVisitor extends SqlBasicVisitor { + + private final List functionList = new ArrayList<>(); + + @Override + public Void visit(SqlCall call) { + if (call instanceof SqlBasicCall && call.getOperator() instanceof SqlFunction) { + SqlFunction function = (SqlFunction) call.getOperator(); + SqlIdentifier opName = function.getNameAsId(); + + functionList.add(UnresolvedIdentifier.of(opName.names)); + } + return super.visit(call); + } + + public List getFunctionList() { + return functionList; + } +} diff --git a/dinky-client/dinky-client-1.17/src/main/java/org/dinky/utils/LineageContext.java b/dinky-client/dinky-client-1.17/src/main/java/org/dinky/utils/LineageContext.java index d707ade42a..c924b9c86d 100644 --- a/dinky-client/dinky-client-1.17/src/main/java/org/dinky/utils/LineageContext.java +++ b/dinky-client/dinky-client-1.17/src/main/java/org/dinky/utils/LineageContext.java @@ -19,26 +19,38 @@ package org.dinky.utils; +import org.dinky.data.model.FunctionResult; import org.dinky.data.model.LineageRel; +import org.dinky.executor.CustomParser; +import org.dinky.executor.CustomTableEnvironment; import org.apache.calcite.plan.RelOptTable; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.metadata.RelColumnOrigin; import org.apache.calcite.rel.metadata.RelMetadataQuery; +import org.apache.calcite.sql.SqlNode; import org.apache.commons.collections.CollectionUtils; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.table.api.TableException; import org.apache.flink.table.api.ValidationException; -import org.apache.flink.table.api.internal.TableEnvironmentImpl; +import org.apache.flink.table.catalog.ContextResolvedFunction; +import org.apache.flink.table.catalog.FunctionCatalog; +import org.apache.flink.table.catalog.UnresolvedIdentifier; +import org.apache.flink.table.functions.FunctionIdentifier; import org.apache.flink.table.operations.Operation; import org.apache.flink.table.operations.SinkModifyOperation; +import org.apache.flink.table.planner.delegation.PlannerBase; import org.apache.flink.table.planner.operations.PlannerQueryOperation; import org.apache.flink.table.planner.plan.schema.TableSourceTable; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Set; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + /** * LineageContext * @@ -46,13 +58,15 @@ */ public class LineageContext { - private final TableEnvironmentImpl tableEnv; + private static final Logger LOG = LoggerFactory.getLogger(LineageContext.class); + + private final CustomTableEnvironment tableEnv; - public LineageContext(TableEnvironmentImpl tableEnv) { + public LineageContext(CustomTableEnvironment tableEnv) { this.tableEnv = tableEnv; } - public List getLineage(String statement) { + public List analyzeLineage(String statement) { // 1. 
Generate original relNode tree Tuple2 parsed = parseStatement(statement); String sinkTable = parsed.getField(0); @@ -131,4 +145,50 @@ private List buildFiledLineageResult(String sinkTable, RelNode optRe } return resultList; } + + /** + * Analyze custom functions from SQL, does not contain system functions. + * + * @param singleSql the SQL statement to analyze + * @return custom functions set + */ + public Set analyzeFunction(String singleSql) { + LOG.info("Analyze function Sql: \n {}", singleSql); + CustomParser parser = (CustomParser) tableEnv.getParser(); + + // parsing sql and return the abstract syntax tree + SqlNode sqlNode = parser.parseSql(singleSql); + + // validate the query + SqlNode validated = parser.validate(sqlNode); + + // look for all functions + FunctionVisitor visitor = new FunctionVisitor(); + validated.accept(visitor); + List fullFunctionList = visitor.getFunctionList(); + + // filter custom functions + Set resultSet = new HashSet<>(); + for (UnresolvedIdentifier unresolvedIdentifier : fullFunctionList) { + getFunctionCatalog() + .lookupFunction(unresolvedIdentifier) + .flatMap(ContextResolvedFunction::getIdentifier) + // the objectIdentifier of the built-in function is null + .flatMap(FunctionIdentifier::getIdentifier) + .ifPresent(identifier -> { + FunctionResult functionResult = new FunctionResult() + .setCatalogName(identifier.getCatalogName()) + .setDatabase(identifier.getDatabaseName()) + .setFunctionName(identifier.getObjectName()); + LOG.debug("analyzed function: {}", functionResult); + resultSet.add(functionResult); + }); + } + return resultSet; + } + + private FunctionCatalog getFunctionCatalog() { + PlannerBase planner = (PlannerBase) tableEnv.getPlanner(); + return planner.getFlinkContext().getFunctionCatalog(); + } } diff --git a/dinky-client/dinky-client-1.17/src/test/java/org/dinky/utils/LineageContextTest.java b/dinky-client/dinky-client-1.17/src/test/java/org/dinky/utils/LineageContextTest.java index fa22d6eec6..d14c1f6b9d 100644 --- a/dinky-client/dinky-client-1.17/src/test/java/org/dinky/utils/LineageContextTest.java +++ b/dinky-client/dinky-client-1.17/src/test/java/org/dinky/utils/LineageContextTest.java @@ -21,20 +21,20 @@ import static org.junit.Assert.assertEquals; +import org.dinky.data.model.FunctionResult; import org.dinky.data.model.LineageRel; +import org.dinky.executor.CustomTableEnvironmentImpl; import org.apache.flink.configuration.Configuration; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.table.api.EnvironmentSettings; -import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; -import org.apache.flink.table.api.internal.TableEnvironmentImpl; import java.util.List; -import java.util.stream.Collectors; -import java.util.stream.Stream; +import java.util.Set; import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; /** @@ -43,7 +43,12 @@ */ public class LineageContextTest { - private static TableEnvironmentImpl tableEnv; + private static final String CATALOG_NAME = "default_catalog"; + + private static final String DEFAULT_DATABASE = "default_database"; + + private static CustomTableEnvironmentImpl tableEnv; + private static LineageContext context; @BeforeClass @@ -52,7 +57,7 @@ public static void setUp() { EnvironmentSettings settings = EnvironmentSettings.newInstance().inStreamingMode().build(); - tableEnv = (TableEnvironmentImpl) StreamTableEnvironment.create(env, settings); + tableEnv = 
CustomTableEnvironmentImpl.create(env, settings); context = new LineageContext(tableEnv); } @@ -76,11 +81,14 @@ public void init() { + ") WITH ( " + " 'connector' = 'print' " + ")"); + // Create custom function my_suffix_udf + tableEnv.executeSql("DROP FUNCTION IF EXISTS my_suffix_udf"); + tableEnv.executeSql("CREATE FUNCTION IF NOT EXISTS my_suffix_udf " + "AS 'org.dinky.utils.MySuffixFunction'"); } @Test - public void testGetLineage() { - List actualList = context.getLineage("INSERT INTO TT select a||c A ,b||c B from ST"); + public void testAnalyzeLineage() { + String sql = "INSERT INTO TT SELECT a||c A ,b||c B FROM ST"; String[][] expectedArray = { {"ST", "a", "TT", "A", "||(a, c)"}, {"ST", "c", "TT", "A", "||(a, c)"}, @@ -88,25 +96,33 @@ public void testGetLineage() { {"ST", "c", "TT", "B", "||(b, c)"} }; - List expectedList = buildResult(expectedArray); + analyzeLineage(sql, expectedArray); + } + + @Ignore + @Test + public void testAnalyzeLineageAndFunction() { + String sql = "INSERT INTO TT SELECT LOWER(a) , my_suffix_udf(b) FROM ST"; + + String[][] expectedArray = { + {"ST", "a", "TT", "A", "LOWER(a)"}, + {"ST", "b", "TT", "B", "my_suffix_udf(b)"} + }; + + analyzeLineage(sql, expectedArray); + + analyzeFunction(sql, new String[] {"my_suffix_udf"}); + } + + private void analyzeLineage(String sql, String[][] expectedArray) { + List actualList = context.analyzeLineage(sql); + List expectedList = LineageRel.build(CATALOG_NAME, DEFAULT_DATABASE, expectedArray); assertEquals(expectedList, actualList); } - private List buildResult(String[][] expectedArray) { - return Stream.of(expectedArray) - .map(e -> { - String transform = e.length == 5 ? e[4] : null; - return new LineageRel( - "default_catalog", - "default_database", - e[0], - e[1], - "default_catalog", - "default_database", - e[2], - e[3], - transform); - }) - .collect(Collectors.toList()); + private void analyzeFunction(String sql, String[] expectedArray) { + Set actualSet = context.analyzeFunction(sql); + Set expectedSet = FunctionResult.build(CATALOG_NAME, DEFAULT_DATABASE, expectedArray); + assertEquals(expectedSet, actualSet); } } diff --git a/dinky-client/dinky-client-1.17/src/test/java/org/dinky/utils/MySuffixFunction.java b/dinky-client/dinky-client-1.17/src/test/java/org/dinky/utils/MySuffixFunction.java new file mode 100644 index 0000000000..ba2c5ac73e --- /dev/null +++ b/dinky-client/dinky-client-1.17/src/test/java/org/dinky/utils/MySuffixFunction.java @@ -0,0 +1,33 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.dinky.utils; + +import org.apache.flink.table.functions.ScalarFunction; + +/** + * @description: MySuffixFunction + * @author: HamaWhite + */ +public class MySuffixFunction extends ScalarFunction { + + public String eval(String input) { + return input.concat("-HamaWhite"); + } +} diff --git a/dinky-client/dinky-client-base/src/main/java/org/dinky/data/model/FunctionResult.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/data/model/FunctionResult.java new file mode 100644 index 0000000000..d8d14f0b6d --- /dev/null +++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/data/model/FunctionResult.java @@ -0,0 +1,52 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.dinky.data.model; + +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; +import lombok.experimental.Accessors; + +/** + * @description: FunctionResult + * @author: HamaWhite + */ +@Data +@NoArgsConstructor +@AllArgsConstructor +@Accessors(chain = true) +public class FunctionResult { + + private String catalogName; + + private String database; + + private String functionName; + + public static Set build(String catalog, String database, String[] expectedArray) { + return Stream.of(expectedArray) + .map(e -> new FunctionResult(catalog, database, e)) + .collect(Collectors.toSet()); + } +} diff --git a/dinky-client/dinky-client-base/src/main/java/org/dinky/data/model/LineageRel.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/data/model/LineageRel.java index befb85f874..825a425f4b 100644 --- a/dinky-client/dinky-client-base/src/main/java/org/dinky/data/model/LineageRel.java +++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/data/model/LineageRel.java @@ -19,7 +19,10 @@ package org.dinky.data.model; +import java.util.List; import java.util.Objects; +import java.util.stream.Collectors; +import java.util.stream.Stream; /** * LineageResult @@ -73,6 +76,23 @@ public LineageRel( this.transform = transform; } + public LineageRel( + String catalog, + String database, + String sourceTable, + String sourceColumn, + String targetTable, + String targetColumn) { + this.sourceCatalog = catalog; + this.sourceDatabase = database; + this.sourceTable = sourceTable; + this.sourceColumn = sourceColumn; + this.targetCatalog = catalog; + this.targetDatabase = database; + this.targetTable = targetTable; + this.targetColumn = targetColumn; + } + public static LineageRel build( String sourceTablePath, String sourceColumn, @@ -94,26 +114,17 @@ public static LineageRel build( transform); } - public static LineageRel build( - String sourceCatalog, - String sourceDatabase, - String sourceTable, - 
String sourceColumn, - String targetCatalog, - String targetDatabase, - String targetTable, - String targetColumn, - String transform) { - return new LineageRel( - sourceCatalog, - sourceDatabase, - sourceTable, - sourceColumn, - targetCatalog, - targetDatabase, - targetTable, - targetColumn, - transform); + public static List build(String catalog, String database, String[][] expectedArray) { + return Stream.of(expectedArray) + .map(e -> { + LineageRel lineageRel = new LineageRel(catalog, database, e[0], e[1], e[2], e[3]); + // transform field is optional + if (e.length == 5) { + lineageRel.setTransform(e[4]); + } + return lineageRel; + }) + .collect(Collectors.toList()); } public String getSourceCatalog() { @@ -160,6 +171,10 @@ public String getTransform() { return transform; } + public void setTransform(String transform) { + this.transform = transform; + } + @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/dinky-client/dinky-client-base/src/main/java/org/dinky/executor/CustomParser.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/executor/CustomParser.java index bba13dcadd..ad2a896851 100644 --- a/dinky-client/dinky-client-base/src/main/java/org/dinky/executor/CustomParser.java +++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/executor/CustomParser.java @@ -31,4 +31,21 @@ public interface CustomParser { Parser getParser(); SqlNode parseExpression(String sqlExpression); + + /** + * Entry point for parsing a SQL and return the abstract syntax tree + * + * @param statement the SQL statement to evaluate + * @return abstract syntax tree + * @throws org.apache.flink.table.api.SqlParserException when failed to parse the statement + */ + SqlNode parseSql(String statement); + + /** + * validate the query + * + * @param sqlNode SqlNode to execute on + * @return validated sqlNode + */ + SqlNode validate(SqlNode sqlNode); } diff --git a/dinky-executor/src/main/java/org/dinky/parser/CustomParserImpl.java b/dinky-executor/src/main/java/org/dinky/parser/CustomParserImpl.java index 04184ac334..adb107bbfe 100644 --- a/dinky-executor/src/main/java/org/dinky/parser/CustomParserImpl.java +++ b/dinky-executor/src/main/java/org/dinky/parser/CustomParserImpl.java @@ -25,6 +25,7 @@ import org.apache.calcite.sql.SqlNode; import org.apache.flink.table.delegation.Parser; import org.apache.flink.table.operations.Operation; +import org.apache.flink.table.planner.calcite.FlinkPlannerImpl; import org.apache.flink.table.planner.delegation.ParserImpl; import org.apache.flink.table.planner.parse.CalciteParser; import org.apache.flink.table.planner.parse.ExtendedParseStrategy; @@ -42,11 +43,13 @@ public class CustomParserImpl implements CustomParser { private static final DinkyExtendedParser DINKY_EXTENDED_PARSER = DinkyExtendedParser.INSTANCE; private final Parser parser; + private final Supplier validatorSupplier; private final Supplier calciteParserSupplier; public CustomParserImpl(Parser parser) { this.parser = parser; this.calciteParserSupplier = getCalciteParserSupplier(this.parser); + this.validatorSupplier = getValidatorSupplier(this.parser); } public static Supplier getCalciteParserSupplier(Parser parser) { @@ -59,6 +62,16 @@ public static Supplier getCalciteParserSupplier(Parser parser) { } } + public static Supplier getValidatorSupplier(Parser parser) { + if (parser instanceof ParserImpl) { + ParserImpl parserImpl = (ParserImpl) parser; + return (Supplier) ReflectUtil.getFieldValue(parserImpl, "validatorSupplier"); + } else { + throw new 
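As an aside, the fixture helpers added above map the compact test arrays onto full model objects; an illustrative sketch with the values used in the tests (not part of the patch):

    // Each lineage row is {sourceTable, sourceColumn, targetTable, targetColumn[, transform]};
    // catalog and database are shared by source and target.
    List<LineageRel> expectedLineage = LineageRel.build(
            "default_catalog", "default_database",
            new String[][] {{"ST", "a", "TT", "A", "||(a, c)"}});

    // Each function entry is a bare name, expanded to catalog/database/functionName.
    Set<FunctionResult> expectedFunctions =
            FunctionResult.build("default_catalog", "default_database", new String[] {"my_suffix_udf"});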
RuntimeException("Unsupported parser type for getValidatorSupplier: " + + parser.getClass().getName()); + } + } + @Override public List parse(String statement) { Optional command = DINKY_EXTENDED_PARSER.parse(statement); @@ -77,6 +90,17 @@ public SqlNode parseExpression(String sqlExpression) { return calciteParserSupplier.get().parseExpression(sqlExpression); } + @Override + public SqlNode parseSql(String statement) { + return calciteParserSupplier.get().parse(statement); + } + + @Override + public SqlNode validate(SqlNode sqlNode) { + FlinkPlannerImpl flinkPlanner = validatorSupplier.get(); + return flinkPlanner.validate(sqlNode); + } + public static class DinkyExtendedParser extends ExtendedParser { public static final DinkyExtendedParser INSTANCE = new DinkyExtendedParser(); From 0b9aac20b1a0a9da54e03ddba38609e9edeb43df Mon Sep 17 00:00:00 2001 From: aiwenmo <32723967+aiwenmo@users.noreply.github.com> Date: Tue, 7 Nov 2023 02:59:40 +0800 Subject: [PATCH 14/21] [Fix-2125][metadata] Fix clickhouse Nullable columns (#2503) Co-authored-by: wenmo <32723967+wenmo@users.noreply.github.com> --- .../metadata/driver/ClickHouseDriver.java | 98 +++++++++++++++++++ 1 file changed, 98 insertions(+) diff --git a/dinky-metadata/dinky-metadata-clickhouse/src/main/java/org/dinky/metadata/driver/ClickHouseDriver.java b/dinky-metadata/dinky-metadata-clickhouse/src/main/java/org/dinky/metadata/driver/ClickHouseDriver.java index fac0e2554e..da7ec1a5a5 100644 --- a/dinky-metadata/dinky-metadata-clickhouse/src/main/java/org/dinky/metadata/driver/ClickHouseDriver.java +++ b/dinky-metadata/dinky-metadata-clickhouse/src/main/java/org/dinky/metadata/driver/ClickHouseDriver.java @@ -19,6 +19,8 @@ package org.dinky.metadata.driver; +import org.dinky.assertion.Asserts; +import org.dinky.data.model.Column; import org.dinky.data.model.Table; import org.dinky.data.result.SqlExplainResult; import org.dinky.metadata.ast.Clickhouse20CreateTableStatement; @@ -31,6 +33,8 @@ import java.sql.PreparedStatement; import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -164,4 +168,98 @@ private SqlExplainResult checkDropTable(SQLDropTableStatement sqlStatement, Stri public Map getFlinkColumnTypeConversion() { return new HashMap<>(); } + + @Override + public List listColumns(String schemaName, String tableName) { + List columns = new ArrayList<>(); + PreparedStatement preparedStatement = null; + ResultSet results = null; + IDBQuery dbQuery = getDBQuery(); + String tableFieldsSql = dbQuery.columnsSql(schemaName, tableName); + try { + preparedStatement = conn.get().prepareStatement(tableFieldsSql); + results = preparedStatement.executeQuery(); + ResultSetMetaData metaData = results.getMetaData(); + List columnList = new ArrayList<>(); + for (int i = 1; i <= metaData.getColumnCount(); i++) { + columnList.add(metaData.getColumnLabel(i)); + } + while (results.next()) { + Column field = new Column(); + String columnName = results.getString(dbQuery.columnName()); + if (columnList.contains(dbQuery.columnKey())) { + String key = results.getString(dbQuery.columnKey()); + field.setKeyFlag(Asserts.isNotNullString(key) && Asserts.isEqualsIgnoreCase(dbQuery.isPK(), key)); + } + field.setName(columnName); + if (columnList.contains(dbQuery.columnType())) { + String columnType = results.getString(dbQuery.columnType()); + if (columnType.indexOf("Nullable") >= 0) { + field.setNullable(true); + columnType = 
columnType.replaceAll("Nullable\\(", "").replaceAll("\\)", ""); + } + if (columnType.contains("(")) { + String type = columnType.replaceAll("\\(.*\\)", ""); + if (!columnType.contains(",")) { + Integer length = Integer.valueOf(columnType.replaceAll("\\D", "")); + field.setLength(length); + } else { + // some database does not have precision + if (dbQuery.precision() != null) { + // 例如浮点类型的长度和精度是一样的,decimal(10,2) + field.setLength(results.getInt(dbQuery.precision())); + } + } + field.setType(type); + } else { + field.setType(columnType); + } + } + if (columnList.contains(dbQuery.columnComment()) + && Asserts.isNotNull(results.getString(dbQuery.columnComment()))) { + String columnComment = + results.getString(dbQuery.columnComment()).replaceAll("\"|'", ""); + field.setComment(columnComment); + } + if (columnList.contains(dbQuery.columnLength())) { + int length = results.getInt(dbQuery.columnLength()); + if (!results.wasNull()) { + field.setLength(length); + } + } + if (columnList.contains(dbQuery.characterSet())) { + field.setCharacterSet(results.getString(dbQuery.characterSet())); + } + if (columnList.contains(dbQuery.collation())) { + field.setCollation(results.getString(dbQuery.collation())); + } + if (columnList.contains(dbQuery.columnPosition())) { + field.setPosition(results.getInt(dbQuery.columnPosition())); + } + if (columnList.contains(dbQuery.precision())) { + field.setPrecision(results.getInt(dbQuery.precision())); + } + if (columnList.contains(dbQuery.scale())) { + field.setScale(results.getInt(dbQuery.scale())); + } + if (columnList.contains(dbQuery.defaultValue())) { + field.setDefaultValue(results.getString(dbQuery.defaultValue())); + } + if (columnList.contains(dbQuery.autoIncrement())) { + field.setAutoIncrement( + Asserts.isEqualsIgnoreCase(results.getString(dbQuery.autoIncrement()), "auto_increment")); + } + if (columnList.contains(dbQuery.defaultValue())) { + field.setDefaultValue(results.getString(dbQuery.defaultValue())); + } + field.setJavaType(getTypeConvert().convert(field, config)); + columns.add(field); + } + } catch (SQLException e) { + e.printStackTrace(); + } finally { + close(preparedStatement, results); + } + return columns; + } } From d64692756e505771b91476937ec32ab6a369de69 Mon Sep 17 00:00:00 2001 From: ZackYoung Date: Tue, 7 Nov 2023 17:28:50 +0800 Subject: [PATCH 15/21] [Fix][UDF]Fix udf python (#2504) * fix_udf_python --- .../java/org/dinky/function/util/UDFUtil.java | 4 +- .../src/main/resources/getPyFuncList.py | 67 +++++++++++++++---- 2 files changed, 56 insertions(+), 15 deletions(-) diff --git a/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java b/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java index ad084ebc11..e08f65f260 100644 --- a/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java +++ b/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java @@ -394,7 +394,7 @@ public static List getPythonUdfList(String pythonPath, String udfFile) { continue; } Configuration configuration = new Configuration(); - configuration.set(PythonOptions.PYTHON_FILES, udfFile + ".zip"); + configuration.set(PythonOptions.PYTHON_FILES, udfFile); configuration.set(PythonOptions.PYTHON_CLIENT_EXECUTABLE, pythonPath); configuration.set(PythonOptions.PYTHON_EXECUTABLE, pythonPath); try { @@ -413,7 +413,7 @@ private static List execPyAndGetUdfNameList(String pyPath, String pyFile String shell = StrUtil.join(" ", Arrays.asList(Opt.ofBlankAble(pyPath).orElse("python3"), pyFile, checkPyFile)); - return 
StrUtil.split(RuntimeUtil.execForStr(shell), ","); + return StrUtil.split(StrUtil.trim(RuntimeUtil.execForStr(shell)), ","); } catch (Exception e) { throw new DinkyException(e); } diff --git a/dinky-function/src/main/resources/getPyFuncList.py b/dinky-function/src/main/resources/getPyFuncList.py index 231610b6d7..af8020f85d 100644 --- a/dinky-function/src/main/resources/getPyFuncList.py +++ b/dinky-function/src/main/resources/getPyFuncList.py @@ -1,7 +1,12 @@ -# -*- coding: utf-8 -*- +import hashlib import importlib +import os import sys -import platform +import uuid +import zipfile +import appdirs +import shutil + import pyflink # import ast @@ -41,9 +46,6 @@ # except Exception as e: # pass - -import os - if len(sys.argv) < 2: raise Exception("Please enter the file path") @@ -52,23 +54,62 @@ udf_name_list = set() +def get_file_md5(path): + """ + 获取文件内容的MD5值 + :param path: 文件所在路径 + :return: + """ + with open(path, 'rb') as file: + data = file.read() + + diff_check = hashlib.md5() + diff_check.update(data) + md5_code = diff_check.hexdigest() + return md5_code + + def list_modules(root_dir): """返回给定目录下所有模块和子模块的名称""" modules = [] - for dirpath, _, filenames in os.walk(root_dir): - for filename in filenames: - if filename.endswith(".py"): - p_ = project_path.replace(os.sep, ".") - module_name = os.path.splitext(os.path.join(dirpath, filename))[0].replace(os.sep, ".").replace( - p_ + ".", "") - modules.append(module_name.replace(root_dir, "")) + if os.path.isdir(root_dir): + sys.path.append(project_path) + for dirpath, _, filenames in os.walk(root_dir): + for filename in filenames: + parse_file(dirpath, filename, modules, root_dir) + else: + file_dir = os.path.dirname(root_dir) + sys.path.append(file_dir) + parse_file(file_dir, root_dir, modules, file_dir) + if project_path.endswith(".py"): + sys.path.append(file_dir) + elif project_path.endswith(".zip"): + tmp_dir = appdirs.user_cache_dir() + file = zipfile.ZipFile(project_path) + unzip_file_path = os.path.normpath(tmp_dir + "/dinky/udf_parse/" + get_file_md5(project_path)) + if os.path.exists(unzip_file_path): + shutil.rmtree(unzip_file_path) + file.extractall(unzip_file_path) + sys.path.append(unzip_file_path) + for dirpath, _, filenames in os.walk(unzip_file_path): + for filename in filenames: + parse_file(dirpath, filename, modules, unzip_file_path) + file.close() return modules +def parse_file(dirpath, filename, modules, root_dir): + root_dir = os.path.normpath(root_dir) + if filename.endswith(".py"): + p_ = root_dir.replace(os.sep, ".") + module_name = os.path.splitext(os.path.join(dirpath, filename))[0].replace(os.sep, ".").replace( + p_ + ".", "") + modules.append(module_name.replace(root_dir, "")) + + if __name__ == '__main__': modules = list_modules(project_path) - sys.path.append(project_path) for module_name in modules: try: module = importlib.import_module(module_name) From 59a322235c369c92008544b325e8a98c1c729446 Mon Sep 17 00:00:00 2001 From: ZackYoung Date: Tue, 7 Nov 2023 21:48:25 +0800 Subject: [PATCH 16/21] [Optimize][h2]optimize h2 (#2506) * optimize_h2 * optimize_h2 --- .../src/main/resources/application-h2.yml | 16 +- .../src/main/resources/application.yml | 6 +- .../src/main/resources/db/db-h2-ddl.sql | 619 ++++ .../src/main/resources/db/db-h2-dml.sql | 2692 +++++++++++++++ dinky-admin/src/main/resources/db/db-h2.sql | 2908 ----------------- 5 files changed, 3328 insertions(+), 2913 deletions(-) create mode 100644 dinky-admin/src/main/resources/db/db-h2-ddl.sql create mode 100644 
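For reference, the Java side consumes this script roughly as shown below (a sketch; the file path is a placeholder): getPyFuncList.py now accepts a .py file, a directory, or a .zip archive, and prints the discovered UDF names, which execPyAndGetUdfNameList splits on commas after trimming.

    // Scan a Python UDF artifact and collect the function names printed by getPyFuncList.py.
    String shell = String.join(" ", "python3", "getPyFuncList.py", "/path/to/python_udfs.zip");
    List<String> udfNames = StrUtil.split(StrUtil.trim(RuntimeUtil.execForStr(shell)), ",");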
dinky-admin/src/main/resources/db/db-h2-dml.sql delete mode 100644 dinky-admin/src/main/resources/db/db-h2.sql diff --git a/dinky-admin/src/main/resources/application-h2.yml b/dinky-admin/src/main/resources/application-h2.yml index 0f732c55c4..37a3d158e6 100644 --- a/dinky-admin/src/main/resources/application-h2.yml +++ b/dinky-admin/src/main/resources/application-h2.yml @@ -18,8 +18,18 @@ spring: datasource: driver-class-name: org.h2.Driver - url: jdbc:h2:mem:dinky;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true;INIT=runscript from 'classpath:db/db-h2.sql' +# url: jdbc:h2:mem:dinky;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true; + url: jdbc:h2:./tmp/db/h2;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true;AUTO_SERVER=TRUE;AUTO_SERVER_PORT=9092; + druid: + initial-size: 1 username: dinky password: dinky - druid: - initial-size: 1 \ No newline at end of file + h2: + console: + enabled: true + path: /h2 + sql: + init: + schema-locations: classpath:db/db-h2-ddl.sql + mode: always + data-locations: classpath:db/db-h2-dml.sql \ No newline at end of file diff --git a/dinky-admin/src/main/resources/application.yml b/dinky-admin/src/main/resources/application.yml index 63f6d7c422..cea7efc5f2 100644 --- a/dinky-admin/src/main/resources/application.yml +++ b/dinky-admin/src/main/resources/application.yml @@ -15,7 +15,8 @@ spring: # If you use pgsql database, please configure pgsql database connection information in application-pgsql.yml # If you use the h2 database, please configure the h2 database connection information in application-h2.yml, # note: the h2 database is only for experience use, and the related data that has been created cannot be migrated, please use it with caution - active: h2,jmx #[h2,mysql,pgsql] + active: h2 #[h2,mysql,pgsql] + include: jmx # mvc config mvc: @@ -147,12 +148,13 @@ sa-token: is-read-header: true token-name: token - ################################################################################################################# ################################################# knife4j Config ################################################ ################################################################################################################# knife4j: enable: true + setting: + language: en sms: is-print: false diff --git a/dinky-admin/src/main/resources/db/db-h2-ddl.sql b/dinky-admin/src/main/resources/db/db-h2-ddl.sql new file mode 100644 index 0000000000..433035afae --- /dev/null +++ b/dinky-admin/src/main/resources/db/db-h2-ddl.sql @@ -0,0 +1,619 @@ + +SET NAMES utf8mb4; +SET FOREIGN_KEY_CHECKS = 0; +DROP TABLE IF EXISTS `dinky_alert_group`; +CREATE TABLE `dinky_alert_group` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'id', + `name` varchar(50) NOT null COMMENT 'alert group name', + `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', + `alert_instance_ids` text null COMMENT 'Alert instance IDS', + `note` varchar(255) null DEFAULT null COMMENT 'note', + `enabled` tinyint(4) null DEFAULT 1 COMMENT 'is enable', + `create_time` datetime(0) null DEFAULT null COMMENT 'create time', + `update_time` datetime(0) null DEFAULT null COMMENT 'update time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; +DROP TABLE IF EXISTS `dinky_alert_history`; +CREATE TABLE `dinky_alert_history` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'id', + `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', + `alert_group_id` int(11) null DEFAULT null COMMENT 'Alert group ID', + `job_instance_id` int(11) null DEFAULT null COMMENT 'job instance ID', + `title` 
varchar(255) null DEFAULT null COMMENT 'alert title', + `content` text null COMMENT 'content description', + `status` int(11) null DEFAULT null COMMENT 'alert status', + `log` text null COMMENT 'log', + `create_time` datetime(0) null DEFAULT null COMMENT 'create time', + `update_time` datetime(0) null DEFAULT null COMMENT 'update time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; +DROP TABLE IF EXISTS `dinky_alert_instance`; +CREATE TABLE `dinky_alert_instance` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'id', + `name` varchar(50) NOT null COMMENT 'alert instance name', + `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', + `type` varchar(50) null DEFAULT null COMMENT 'alert instance type such as: DingTalk,Wechat(Webhook,app) Feishu ,email', + `params` text null COMMENT 'configuration', + `enabled` tinyint(4) null DEFAULT 1 COMMENT 'is enable', + `create_time` datetime(0) null DEFAULT null COMMENT 'create time', + `update_time` datetime(0) null DEFAULT null COMMENT 'update time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; +DROP TABLE IF EXISTS `dinky_catalogue`; +CREATE TABLE `dinky_catalogue` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', + `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', + `task_id` int(11) null DEFAULT null COMMENT 'Job ID', + `name` varchar(100) NOT null COMMENT 'Job Name', + `type` varchar(50) null DEFAULT null COMMENT 'Job Type', + `parent_id` int(11) NOT null DEFAULT 0 COMMENT 'parent ID', + `enabled` tinyint(1) NOT null DEFAULT 1 COMMENT 'is enable', + `is_leaf` tinyint(1) NOT null COMMENT 'is leaf node', + `create_time` datetime(0) null DEFAULT null COMMENT 'create time', + `update_time` datetime(0) null DEFAULT null COMMENT 'update time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; +DROP TABLE IF EXISTS `dinky_cluster`; +CREATE TABLE `dinky_cluster` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', + `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', + `name` varchar(255) NOT null COMMENT 'cluster instance name', + `alias` varchar(255) null DEFAULT null COMMENT 'cluster instance alias', + `type` varchar(50) null DEFAULT null COMMENT 'cluster types', + `hosts` text null COMMENT 'cluster hosts', + `job_manager_host` varchar(255) null DEFAULT null COMMENT 'Job Manager Host', + `version` varchar(20) null DEFAULT null COMMENT 'version', + `status` int(11) null DEFAULT null COMMENT 'cluster status', + `note` varchar(255) null DEFAULT null COMMENT 'note', + `auto_registers` tinyint(1) null DEFAULT 0 COMMENT 'is auto registration', + `cluster_configuration_id` int(11) null DEFAULT null COMMENT 'cluster configuration id', + `task_id` int(11) null DEFAULT null COMMENT 'task ID', + `enabled` tinyint(1) NOT null DEFAULT 1 COMMENT 'is enable', + `create_time` datetime(0) null DEFAULT null COMMENT 'create time', + `update_time` datetime(0) null DEFAULT null COMMENT 'update time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; +DROP TABLE IF EXISTS `dinky_cluster_configuration`; +CREATE TABLE `dinky_cluster_configuration` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', + `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', + `name` varchar(255) NOT null COMMENT 'cluster configuration name', + `type` varchar(50) null DEFAULT null COMMENT 'cluster type', + `config_json` text null COMMENT 'json of configuration', + `is_available` tinyint(1) NOT null DEFAULT 0 COMMENT 'is available', + `note` varchar(255) null DEFAULT null COMMENT 'note', + `enabled` tinyint(1) NOT null DEFAULT 1 COMMENT 'is enable', + `create_time` datetime(0) null 
DEFAULT null COMMENT 'create time', + `update_time` datetime(0) null DEFAULT null COMMENT 'update time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; +DROP TABLE IF EXISTS `dinky_database`; +CREATE TABLE `dinky_database` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', + `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', + `name` varchar(30) NOT null COMMENT 'database name', + `group_name` varchar(255) null DEFAULT 'Default' COMMENT 'database belong group name', + `type` varchar(50) NOT null COMMENT 'database type', + `ip` varchar(255) null DEFAULT null COMMENT 'database ip', + `port` int(11) null DEFAULT null COMMENT 'database port', + `url` varchar(255) null DEFAULT null COMMENT 'database url', + `username` varchar(50) null DEFAULT null COMMENT 'username', + `password` varchar(512) null DEFAULT null COMMENT 'password', + `note` varchar(255) null DEFAULT null COMMENT 'note', + `flink_config` text null COMMENT 'Flink configuration', + `flink_template` text null COMMENT 'Flink template', + `db_version` varchar(255) null DEFAULT null COMMENT 'version,such as: 11g of oracle ,2.2.3 of hbase', + `status` tinyint(1) null DEFAULT null COMMENT 'heartbeat status', + `health_time` datetime(0) null DEFAULT null COMMENT 'last heartbeat time of trigger', + `heartbeat_time` datetime(0) null DEFAULT null COMMENT 'last heartbeat time', + `enabled` tinyint(1) NOT null DEFAULT 1 COMMENT 'is enable', + `create_time` datetime(0) null DEFAULT null COMMENT 'create time', + `update_time` datetime(0) null DEFAULT null COMMENT 'update time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; +DROP TABLE IF EXISTS `dinky_flink_document`; +CREATE TABLE `dinky_flink_document` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'id', + `category` varchar(255) null DEFAULT null COMMENT 'document category', + `type` varchar(255) null DEFAULT null COMMENT 'document type', + `subtype` varchar(255) null DEFAULT null COMMENT 'document subtype', + `name` varchar(255) null DEFAULT null COMMENT 'document name', + `description` longtext null, + `fill_value` longtext null COMMENT 'fill value', + `version` varchar(255) null DEFAULT null COMMENT 'document version such as:(flink1.12,flink1.13,flink1.14,flink1.15)', + `like_num` int(11) null DEFAULT 0 COMMENT 'like number', + `enabled` tinyint(1) NOT null DEFAULT 0 COMMENT 'is enable', + `create_time` datetime(0) null DEFAULT null COMMENT 'create time', + `update_time` datetime(0) null DEFAULT null COMMENT 'update_time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; + +DROP TABLE IF EXISTS `dinky_fragment`; +CREATE TABLE `dinky_fragment` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'id', + `name` varchar(50) NOT null COMMENT 'fragment name', + `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', + `fragment_value` text NOT null COMMENT 'fragment value', + `note` text null COMMENT 'note', + `enabled` tinyint(4) null DEFAULT 1 COMMENT 'is enable', + `create_time` datetime(0) null DEFAULT null COMMENT 'create time', + `update_time` datetime(0) null DEFAULT null COMMENT 'update time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; +DROP TABLE IF EXISTS `dinky_history`; +CREATE TABLE `dinky_history` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', + `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', + `cluster_id` int(11) NOT null DEFAULT 0 COMMENT 'cluster ID', + `cluster_configuration_id` int(11) null DEFAULT null COMMENT 'cluster configuration id', + `session` varchar(255) null DEFAULT null COMMENT 'session', + `job_id` varchar(50) null DEFAULT null COMMENT 'Job ID', + 
`job_name` varchar(255) null DEFAULT null COMMENT 'Job Name', + `job_manager_address` varchar(255) null DEFAULT null COMMENT 'JJobManager Address', + `status` int(11) NOT null DEFAULT 0 COMMENT 'status', + `type` varchar(50) null DEFAULT null COMMENT 'job type', + `statement` text null COMMENT 'statement set', + `error` text null COMMENT 'error message', + `result` text null COMMENT 'result set', + `config_json` json null COMMENT 'config json', + `start_time` datetime(0) null DEFAULT null COMMENT 'job start time', + `end_time` datetime(0) null DEFAULT null COMMENT 'job end time', + `task_id` int(11) null DEFAULT null COMMENT 'task ID', + INDEX task_index1(`task_id`), + INDEX cluster_index2(`cluster_id`) +) ENGINE = InnoDB ROW_FORMAT = Dynamic; + +DROP TABLE IF EXISTS `dinky_job_history`; +CREATE TABLE `dinky_job_history` ( + `id` int(11) NOT null COMMENT 'id', + `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', + `job_json` json null COMMENT 'Job information json', + `exceptions_json` json null COMMENT 'error message json', + `checkpoints_json` json null COMMENT 'checkpoints json', + `checkpoints_config_json` json null COMMENT 'checkpoints configuration json', + `config_json` json null COMMENT 'configuration', + `cluster_json` json null COMMENT 'cluster instance configuration', + `cluster_configuration_json` json null COMMENT 'cluster config', + `update_time` datetime(0) null DEFAULT null COMMENT 'update time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; +DROP TABLE IF EXISTS `dinky_job_instance`; +CREATE TABLE `dinky_job_instance` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'id', + `name` varchar(255) null DEFAULT null COMMENT 'job instance name', + `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', + `task_id` int(11) null DEFAULT null COMMENT 'task ID', + `step` int(11) null DEFAULT null COMMENT 'job lifecycle', + `cluster_id` int(11) null DEFAULT null COMMENT 'cluster ID', + `jid` varchar(50) null DEFAULT null COMMENT 'Flink JobId', + `status` varchar(50) null DEFAULT null COMMENT 'job instance status', + `history_id` int(11) null DEFAULT null COMMENT 'execution history ID', + `create_time` datetime(0) null DEFAULT null COMMENT 'create time', + `update_time` datetime(0) null DEFAULT null COMMENT 'update time', + `finish_time` datetime(0) null DEFAULT null COMMENT 'finish time', + `duration` bigint(20) null DEFAULT null COMMENT 'job duration', + `error` text null COMMENT 'error logs', + `failed_restart_count` int(11) null DEFAULT null COMMENT 'failed restart count', + INDEX job_instance_task_id_idx13(`task_id`) +) ENGINE = InnoDB ROW_FORMAT = Dynamic; + +DROP TABLE IF EXISTS `dinky_role`; +CREATE TABLE `dinky_role` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', + `tenant_id` int(11) NOT null COMMENT 'tenant id', + `role_code` varchar(64) NOT null COMMENT 'role code', + `role_name` varchar(64) NOT null COMMENT 'role name', + `is_delete` tinyint(1) NOT null DEFAULT 0 COMMENT 'is delete', + `note` varchar(255) null DEFAULT null COMMENT 'note', + `create_time` datetime(0) null DEFAULT null COMMENT 'create time', + `update_time` datetime(0) null DEFAULT null COMMENT 'update time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; + + +DROP TABLE IF EXISTS `dinky_savepoints`; +CREATE TABLE `dinky_savepoints` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', + `task_id` int(11) NOT null COMMENT 'task ID', + `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', + `name` varchar(255) NOT null COMMENT 'task name', + `type` varchar(255) NOT null COMMENT 'savepoint 
type', + `path` varchar(255) NOT null COMMENT 'savepoint path', + `create_time` datetime(0) null DEFAULT null COMMENT 'create time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; + +DROP TABLE IF EXISTS `dinky_sys_config`; +CREATE TABLE `dinky_sys_config` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', + `name` varchar(255) NOT null COMMENT 'configuration name', + `value` text null COMMENT 'configuration value', + `create_time` datetime(0) null DEFAULT null COMMENT 'create time', + `update_time` datetime(0) null DEFAULT null COMMENT 'update time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; +DROP TABLE IF EXISTS `dinky_task`; +CREATE TABLE `dinky_task` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', + `name` varchar(255) NOT null COMMENT 'Job name', + `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', + `dialect` varchar(50) null DEFAULT null COMMENT 'dialect', + `type` varchar(50) null DEFAULT null COMMENT 'Job type', + `check_point` int(11) null DEFAULT null COMMENT 'CheckPoint trigger seconds', + `save_point_strategy` int(11) null DEFAULT null COMMENT 'SavePoint strategy', + `save_point_path` varchar(255) null DEFAULT null COMMENT 'SavePointPath', + `parallelism` int(11) null DEFAULT null COMMENT 'parallelism', + `fragment` tinyint(1) null DEFAULT 0 COMMENT 'fragment', + `statement_set` tinyint(1) null DEFAULT 0 COMMENT 'enable statement set', + `batch_model` tinyint(1) null DEFAULT 0 COMMENT 'use batch model', + `cluster_id` int(11) null DEFAULT null COMMENT 'Flink cluster ID', + `cluster_configuration_id` int(11) null DEFAULT null COMMENT 'cluster configuration ID', + `database_id` int(11) null DEFAULT null COMMENT 'database ID', + `env_id` int(11) null DEFAULT null COMMENT 'env id', + `alert_group_id` bigint(20) null DEFAULT null COMMENT 'alert group id', + `config_json` text null COMMENT 'configuration json', + `note` varchar(255) null DEFAULT null COMMENT 'Job Note', + `step` int(11) null DEFAULT 1 COMMENT 'Job lifecycle', + `job_instance_id` bigint(20) null DEFAULT null COMMENT 'job instance id', + `enabled` tinyint(1) NOT null DEFAULT 1 COMMENT 'is enable', + `create_time` datetime(0) null DEFAULT null COMMENT 'create time', + `update_time` datetime(0) null DEFAULT null COMMENT 'update time', + `version_id` int(11) null DEFAULT null COMMENT 'version id', + `statement` text null DEFAULT null COMMENT 'statement' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; + +DROP TABLE IF EXISTS `dinky_task_version`; +CREATE TABLE `dinky_task_version` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', + `task_id` int(11) NOT null COMMENT 'task ID ', + `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', + `version_id` int(11) NOT null COMMENT 'version ID ', + `statement` text null COMMENT 'flink sql statement', + `name` varchar(255) NOT null COMMENT 'version name', + `dialect` varchar(50) null DEFAULT null COMMENT 'dialect', + `type` varchar(50) null DEFAULT null COMMENT 'type', + `task_configure` text NOT null COMMENT 'task configuration', + `create_time` datetime(0) null DEFAULT null COMMENT 'create time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; +DROP TABLE IF EXISTS `dinky_tenant`; +CREATE TABLE `dinky_tenant` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', + `tenant_code` varchar(64) NOT null COMMENT 'tenant code', + `is_delete` tinyint(1) NOT null DEFAULT 0 COMMENT 'is delete', + `note` varchar(255) null DEFAULT null COMMENT 'note', + `create_time` datetime(0) null DEFAULT null COMMENT 'create time', + `update_time` datetime(0) null DEFAULT null COMMENT 'update 
time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; + +DROP TABLE IF EXISTS `dinky_udf`; +CREATE TABLE `dinky_udf` ( + `id` int(11) NOT null AUTO_INCREMENT, + `name` varchar(200) null DEFAULT null COMMENT 'udf name', + `class_name` varchar(200) null DEFAULT null COMMENT 'Complete class name', + `source_code` longtext null COMMENT 'source code', + `compiler_code` binary(255) null DEFAULT null COMMENT 'compiler product', + `version_id` int(11) null DEFAULT null COMMENT 'version', + `version_description` varchar(50) null DEFAULT null COMMENT 'version description', + `is_default` tinyint(1) null DEFAULT null COMMENT 'Is it default', + `document_id` int(11) null DEFAULT null COMMENT 'corresponding to the document id', + `from_version_id` int(11) null DEFAULT null COMMENT 'Based on udf version id', + `code_md5` varchar(50) null DEFAULT null COMMENT 'source code of md5', + `dialect` varchar(50) null DEFAULT null COMMENT 'dialect', + `type` varchar(50) null DEFAULT null COMMENT 'type', + `step` int(11) null DEFAULT null COMMENT 'job lifecycle step', + `enable` tinyint(1) null DEFAULT 1 COMMENT 'is enable', + `create_time` datetime(0) null DEFAULT null COMMENT 'create time', + `update_time` datetime DEFAULT null ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; +DROP TABLE IF EXISTS `dinky_udf_template`; +CREATE TABLE `dinky_udf_template` ( + `id` int(11) NOT null AUTO_INCREMENT, + `name` varchar(100) null DEFAULT null COMMENT 'template name', + `code_type` varchar(10) null DEFAULT null COMMENT 'code type', + `function_type` varchar(10) null DEFAULT null COMMENT 'function type', + `template_code` longtext null COMMENT 'code', + `enabled` tinyint(1) not null DEFAULT 1 COMMENT 'is enable', + `create_time` datetime(0) null DEFAULT null COMMENT 'create time', + `update_time` datetime DEFAULT null ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; + + +DROP TABLE IF EXISTS `dinky_user`; +CREATE TABLE `dinky_user` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', + `username` varchar(50) NOT null COMMENT 'username', + `user_type` int DEFAULT 0 NOT null COMMENT 'login type (0:LOCAL,1:LDAP)', + `password` varchar(50) null DEFAULT null COMMENT 'password', + `nickname` varchar(50) null DEFAULT null COMMENT 'nickname', + `worknum` varchar(50) null DEFAULT null COMMENT 'worknum', + `avatar` blob null COMMENT 'avatar', + `mobile` varchar(20) null DEFAULT null COMMENT 'mobile phone', + `enabled` tinyint(1) NOT null DEFAULT 1 COMMENT 'is enable', + `super_admin_flag` tinyint(1) DEFAULT '0' COMMENT 'is super admin(0:false,1true)', + `is_delete` tinyint(1) NOT null DEFAULT 0 COMMENT 'is delete', + `create_time` datetime(0) null DEFAULT null COMMENT 'create time', + `update_time` datetime(0) null DEFAULT null COMMENT 'update time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; + +DROP TABLE IF EXISTS `dinky_user_role`; +CREATE TABLE `dinky_user_role` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', + `user_id` int(11) NOT null COMMENT 'user id', + `role_id` int(11) NOT null COMMENT 'role id', + `create_time` datetime(0) null DEFAULT null COMMENT 'create time', + `update_time` datetime(0) null DEFAULT null COMMENT 'update time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; + +DROP TABLE IF EXISTS `dinky_user_tenant`; +CREATE TABLE `dinky_user_tenant` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', + `user_id` int(11) NOT null COMMENT 'user id', + `tenant_id` int(11) NOT null COMMENT 'tenant id', + `tenant_admin_flag` tinyint DEFAULT 
'0' COMMENT 'tenant admin flag(0:false,1:true)', + `create_time` datetime(0) null DEFAULT null COMMENT 'create time', + `update_time` datetime(0) null DEFAULT null COMMENT 'update time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; + +DROP TABLE IF EXISTS `metadata_column`; +CREATE TABLE `metadata_column` ( + `column_name` varchar(255) NOT null COMMENT 'column name', + `column_type` varchar(255) NOT null COMMENT 'column type, such as : Physical , Metadata , Computed , WATERMARK', + `data_type` varchar(255) NOT null COMMENT 'data type', + `expr` varchar(255) null DEFAULT null COMMENT 'expression', + `description` varchar(255) NOT null COMMENT 'column description', + `table_id` int(11) NOT null COMMENT 'table id', + `primary` bit(1) null DEFAULT null COMMENT 'table primary key', + `update_time` datetime(0) null DEFAULT null COMMENT 'update time', + `create_time` datetime(0) NOT null DEFAULT CURRENT_TIMESTAMP(0) COMMENT 'create time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; +DROP TABLE IF EXISTS `metadata_database`; +CREATE TABLE `metadata_database` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'id', + `database_name` varchar(255) NOT null COMMENT 'database name', + `description` varchar(255) null DEFAULT null COMMENT 'database description', + `update_time` datetime(0) null DEFAULT null COMMENT 'update time', + `create_time` datetime(0) null DEFAULT CURRENT_TIMESTAMP(0) COMMENT 'create time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; +DROP TABLE IF EXISTS `metadata_database_property`; +CREATE TABLE `metadata_database_property` ( + `key` varchar(255) NOT null COMMENT 'key', + `value` varchar(255) null DEFAULT null COMMENT 'value', + `database_id` int(11) NOT null COMMENT 'database id', + `update_time` datetime(0) null DEFAULT null COMMENT 'update time', + `create_time` datetime(0) NOT null DEFAULT CURRENT_TIMESTAMP(0) COMMENT 'create time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; +DROP TABLE IF EXISTS `metadata_function`; +CREATE TABLE `metadata_function` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT '主键', + `function_name` varchar(255) NOT null COMMENT 'function name', + `class_name` varchar(255) NOT null COMMENT 'class name', + `database_id` int(11) NOT null COMMENT 'database id', + `function_language` varchar(255) null DEFAULT null COMMENT 'function language', + `update_time` datetime(0) null DEFAULT null COMMENT 'update time', + `create_time` datetime(0) null DEFAULT CURRENT_TIMESTAMP(0) COMMENT 'create time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; +DROP TABLE IF EXISTS `metadata_table`; +CREATE TABLE `metadata_table` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT '主键', + `table_name` varchar(255) NOT null COMMENT 'table name', + `table_type` varchar(255) NOT null COMMENT 'type,such as:database,table,view', + `database_id` int(11) NOT null COMMENT 'database id', + `description` varchar(255) null DEFAULT null COMMENT 'table description', + `update_time` datetime(0) null DEFAULT null COMMENT 'update time', + `create_time` datetime(0) null DEFAULT CURRENT_TIMESTAMP(0) COMMENT 'create time' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; +DROP TABLE IF EXISTS `metadata_table_property`; +CREATE TABLE `metadata_table_property` ( + `key` varchar(255) NOT null COMMENT 'key', + `value` mediumtext null COMMENT 'value', + `table_id` int(11) NOT null COMMENT 'table id', + `update_time` datetime(0) null DEFAULT null COMMENT 'update time', + `create_time` datetime(0) NOT null DEFAULT CURRENT_TIMESTAMP(0) COMMENT 'create tiime' +) ENGINE = InnoDB ROW_FORMAT = Dynamic; +-- ---------------------------- +-- 
Records of metadata_table_property +-- ---------------------------- +-- ---------------------------- +-- Table structure for dinky_row_permissions +-- ---------------------------- +DROP TABLE IF EXISTS `dinky_row_permissions`; +CREATE TABLE dinky_row_permissions ( + id int PRIMARY KEY AUTO_INCREMENT COMMENT 'ID', + role_id int NOT null COMMENT 'role id', + table_name varchar(255) null COMMENT 'table name', + expression varchar(255) null COMMENT 'expression', + create_time datetime null COMMENT 'create time', + update_time datetime null COMMENT 'update time' +); +SET FOREIGN_KEY_CHECKS = 1; +DROP TABLE IF EXISTS `dinky_git_project`; +CREATE TABLE `dinky_git_project` ( + `id` bigint(20) NOT null AUTO_INCREMENT, + `tenant_id` bigint(20) NOT null, + `name` varchar(255) NOT null, + `url` varchar(1000) NOT null, + `branch` varchar(1000) NOT null, + `username` varchar(255) DEFAULT null, + `password` varchar(255) DEFAULT null, + `private_key` varchar(255) DEFAULT null COMMENT 'keypath', + `pom` varchar(255) DEFAULT null, + `build_args` varchar(255) DEFAULT null, + `code_type` tinyint(4) DEFAULT null COMMENT 'code type(1-java,2-python)', + `type` tinyint(4) NOT null COMMENT '1-http, 2-ssh', + `last_build` datetime DEFAULT null, + `description` varchar(255) DEFAULT null, + `build_state` tinyint(2) NOT null DEFAULT '0' COMMENT '0-notStart 1-process 2-failed 3-success', + `build_step` tinyint(2) NOT null DEFAULT '0', + `enabled` tinyint(1) NOT null DEFAULT '1' COMMENT '0-disable 1-enable', + `udf_class_map_list` text COMMENT 'scan udf class', + `order_line` int(11) NOT null DEFAULT '1' COMMENT 'order', + `create_time` datetime NOT null DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', + `update_time` datetime NOT null DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time' +) ENGINE = InnoDB; + +DROP TABLE IF EXISTS dinky_metrics; +CREATE TABLE `dinky_metrics` ( + `id` int(11) NOT null AUTO_INCREMENT, + `task_id` int(255) DEFAULT null, + `vertices` varchar(255) DEFAULT null, + `metrics` varchar(255) DEFAULT null, + `position` int(11) DEFAULT null, + `show_type` varchar(255) DEFAULT null, + `show_size` varchar(255) DEFAULT null, + `title` CLOB DEFAULT null, + `layout_name` varchar(255) DEFAULT null, + `create_time` datetime DEFAULT null, + `update_time` datetime DEFAULT null +) ENGINE = InnoDB; + +DROP TABLE IF EXISTS dinky_resources; +CREATE TABLE `dinky_resources` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'key', + `file_name` varchar(64) DEFAULT null COMMENT 'file name', + `description` varchar(255) DEFAULT null, + `user_id` int(11) DEFAULT null COMMENT 'user id', + `type` tinyint(4) DEFAULT null COMMENT 'resource type,0:FILE,1:UDF', + `size` bigint(20) DEFAULT null COMMENT 'resource size', + `pid` int(11) DEFAULT null, + `full_name` varchar(128) DEFAULT null, + `is_directory` tinyint(4) DEFAULT null, + `create_time` datetime NOT null DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', + `update_time` datetime NOT null DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time' +) ENGINE = InnoDB; + + +-- ---------------------------- +-- Table structure for dinky_sys_login_log +-- ---------------------------- +DROP TABLE IF EXISTS dinky_sys_login_log; +CREATE TABLE `dinky_sys_login_log` ( + `id` int(11) NOT null AUTO_INCREMENT COMMENT 'key', + `user_id` int(11) NOT null COMMENT 'user id', + `username` varchar(60) NOT null COMMENT 'username', + `login_type` int NOT null COMMENT 'login type(0:LOCAL,1:LDAP)', + `ip` varchar(40) NOT null COMMENT 'ip addr', + `status` int NOT null COMMENT 'login 
status', + `msg` text NOT null COMMENT 'status msg', + `create_time` datetime NOT null COMMENT 'create time', + `access_time` datetime DEFAULT null COMMENT 'access time', + `update_time` datetime NOT null, + `is_deleted` tinyint(1) NOT null DEFAULT '0', + PRIMARY KEY (`id`) +) ENGINE=InnoDB; + + +-- ---------------------------- +-- Table structure for dinky_sys_operate_log +-- ---------------------------- +DROP TABLE IF EXISTS `dinky_sys_operate_log`; +CREATE TABLE `dinky_sys_operate_log` ( + `id` bigint NOT null AUTO_INCREMENT COMMENT 'id', + `module_name` varchar(50) DEFAULT '' COMMENT 'module name', + `business_type` int null DEFAULT 0 COMMENT 'business type', + `method` varchar(100) null DEFAULT '' COMMENT 'method name', + `request_method` varchar(10) null DEFAULT '' COMMENT 'request method', + `operate_name` varchar(50) DEFAULT '' COMMENT 'operate name', + `operate_user_id` int NOT null COMMENT 'operate user id', + `operate_url` varchar(255) DEFAULT '' COMMENT 'operate url', + `operate_ip` varchar(50) DEFAULT '' COMMENT 'ip', + `operate_location` varchar(255) DEFAULT '' COMMENT 'operate location', + `operate_param` longtext DEFAULT '' COMMENT 'request param', + `json_result` longtext DEFAULT null COMMENT 'return json result', + `status` int null DEFAULT null COMMENT 'operate status', + `error_msg` longtext DEFAULT null COMMENT 'error msg', + `operate_time` datetime(0) DEFAULT null COMMENT 'operate time', + PRIMARY KEY (`id`) +) ENGINE = InnoDB; + + + +-- ---------------------------- +-- Table structure for dinky_sys_menu +-- ---------------------------- +drop table if exists `dinky_sys_menu`; +create table `dinky_sys_menu` ( + `id` bigint not null auto_increment comment 'id', + `parent_id` bigint not null comment 'parent menu id', + `name` varchar(64) not null comment 'menu button name', + `path` varchar(64) default null comment 'routing path', + `component` varchar(64) default null comment 'routing component', + `perms` varchar(64) default null comment 'authority id', + `icon` varchar(64) default null comment 'icon', + `type` char(1) default null comment 'type(M:directory C:menu F:button)', + `display` tinyint default 1 comment 'whether the menu is displayed', + `order_num` int default null comment 'sort', + `create_time` datetime not null default current_timestamp comment 'create time', + `update_time` datetime not null default current_timestamp on update current_timestamp comment 'modify time', + `note` varchar(255) default null comment 'note', + primary key (`id`) +) engine=innodb ; + +-- ---------------------------- +-- Table structure for dinky_sys_role_menu +-- ---------------------------- +drop table if exists `dinky_sys_role_menu`; +CREATE TABLE `dinky_sys_role_menu` ( + `id` bigint NOT null AUTO_INCREMENT COMMENT 'id', + `role_id` bigint NOT null COMMENT 'role id', + `menu_id` bigint NOT null COMMENT 'menu id', + `create_time` datetime not null default current_timestamp comment 'create time', + `update_time` datetime not null default current_timestamp on update current_timestamp comment 'modify time', + PRIMARY KEY (`id`), + UNIQUE KEY `un_role_menu_inx` (`role_id`,`menu_id`) +) ENGINE=InnoDB ; + + + +-- ---------------------------- +-- Table structure for dinky_sys_token +-- ---------------------------- +drop table if exists `dinky_sys_token`; +CREATE TABLE `dinky_sys_token` ( + `id` bigint NOT NULL AUTO_INCREMENT COMMENT 'id', + `token_value` varchar(255) NOT NULL COMMENT 'token value', + `user_id` bigint NOT NULL COMMENT 'user id', + `role_id` bigint NOT NULL COMMENT 
'role id', + `tenant_id` bigint NOT NULL COMMENT 'tenant id', + `expire_type` tinyint NOT NULL COMMENT '1: never expire, 2: expire after a period of time, 3: expire at a certain time', + `expire_start_time` datetime DEFAULT NULL COMMENT 'expire start time, when expire_type = 3, it is the start time of the period', + `expire_end_time` datetime DEFAULT NULL COMMENT 'expire end time, when expire_type = 2 or 3, it is the end time of the period', + `create_time` datetime NOT NULL COMMENT 'create time', + `update_time` datetime NOT NULL COMMENT 'modify time', + `creator` bigint DEFAULT NULL COMMENT 'creator', + `updator` bigint DEFAULT NULL COMMENT 'modifier', + PRIMARY KEY (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8mb4 COMMENT='token management'; + + + +-- ---------------------------- +-- Table structure for dinky_alert_template and dinky_alert_rules +-- ---------------------------- +drop table if exists `dinky_alert_template`; +create table if not exists dinky_alert_template +( + id int auto_increment + primary key COMMENT 'id', + name varchar(20) COMMENT 'template name', + template_content text null COMMENT 'template content', + enabled tinyint default 1 null COMMENT 'is enable', + create_time datetime null COMMENT 'create time', + update_time datetime null COMMENT 'update time' +); + +drop table if exists `dinky_alert_rules`; +create table if not exists dinky_alert_rules +( + id int auto_increment + primary key comment 'id', + name varchar(40) unique not null comment 'rule name', + rule text null comment 'specify rule', + template_id int null comment 'template id', + rule_type varchar(10) null comment 'alert rule type', + trigger_conditions varchar(20) null comment 'trigger conditions', + description text null comment 'description', + enabled tinyint default 1 null comment 'is enable', + create_time datetime null comment 'create time', + update_time datetime null comment 'update time' +); + + + + +CREATE TABLE IF NOT EXISTS `dinky_udf_manage` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `name` varchar(50) DEFAULT NULL COMMENT 'udf name', + `class_name` varchar(50) DEFAULT NULL COMMENT 'Complete class name', + `task_id` int(11) DEFAULT NULL COMMENT 'task id', + `resources_id` int(11) DEFAULT NULL COMMENT 'resources id', + `enabled` tinyint(1) DEFAULT 1 COMMENT 'is enable', + `create_time` datetime DEFAULT NULL COMMENT 'create time', + `update_time` datetime DEFAULT NULL COMMENT 'update time' +) ENGINE = InnoDB ROW_FORMAT = DYNAMIC; diff --git a/dinky-admin/src/main/resources/db/db-h2-dml.sql b/dinky-admin/src/main/resources/db/db-h2-dml.sql new file mode 100644 index 0000000000..bcbcb1bc84 --- /dev/null +++ b/dinky-admin/src/main/resources/db/db-h2-dml.sql @@ -0,0 +1,2692 @@ +INSERT INTO `dinky_role` +VALUES ( 1, 1, 'SuperAdmin', 'SuperAdmin', 0 + , 'SuperAdmin of Role', '2022-12-13 05:27:19', '2022-12-13 05:27:19'); + +INSERT INTO `dinky_tenant` +VALUES ( 1, 'DefaultTenant', 0, 'DefaultTenant', '2022-12-13 05:27:19' + , '2022-12-13 05:27:19'); + +INSERT INTO `dinky_user` +VALUES ( 1, 'admin', 1, '21232f297a57a5a743894a0e4a801fc3', 'Admin', 'Dinky-001' + , null, '17777777777', 1, 1, 0, '2022-12-13 05:27:19' + , '2022-12-13 05:27:19'); + +INSERT INTO `dinky_user_role` +VALUES (1, 1, 1, '2022-12-13 05:27:19', '2022-12-13 05:27:19'); + +INSERT INTO `dinky_user_tenant` (`id`, `user_id`, `tenant_id`, `create_time`, `update_time`) +VALUES (1, 1, 1, current_time, current_time); + +INSERT INTO `dinky_git_project` ( `id`, `tenant_id`, `name`, `url`, `branch` + , `username`, `password`, `private_key`, `pom`, `build_args` + , 
`code_type`, `type`, `last_build`, `description`, `build_state` + , `build_step`, `enabled`, `udf_class_map_list`, `order_line`) +VALUES ( 1, 1, 'java-udf', 'https://github.com/zackyoungh/dinky-quickstart-java.git', 'master' + , null, null, null, null, '-P flink-1.14' + , 1, 1, null, null, 0 + , 0, 1, '[]', 1); +INSERT INTO `dinky_git_project` ( `id`, `tenant_id`, `name`, `url`, `branch` + , `username`, `password`, `private_key`, `pom`, `build_args` + , `code_type`, `type`, `last_build`, `description`, `build_state` + , `build_step`, `enabled`, `udf_class_map_list`, `order_line`) +VALUES ( 2, 1, 'python-udf', 'https://github.com/zackyoungh/dinky-quickstart-python.git', 'master' + , null, null, null, null, '' + , 2, 1, null, null, 0 + , 0, 1, '[]', 2); + +INSERT INTO `dinky_resources` (`id`, `file_name`, `description`, `user_id`, `type`, `size`, `pid`, `full_name`, + `is_directory`) +VALUES (0, 'Root', 'main folder', 1, 0, 0, -1, '/', 1); + + +INSERT INTO `dinky_sys_menu` +VALUES (1, -1, '首页', '/home', './Home', 'home', 'HomeOutlined', 'C', 0, 1, '2023-08-11 14:06:52', + '2023-09-25 18:26:45', null); +INSERT INTO `dinky_sys_menu` +VALUES (2, -1, '运维中心', '/devops', null, 'devops', 'ControlOutlined', 'M', 0, 20, '2023-08-11 14:06:52', + '2023-09-26 14:53:34', null); +INSERT INTO `dinky_sys_menu` +VALUES (3, -1, '注册中心', '/registration', null, 'registration', 'AppstoreOutlined', 'M', 0, 23, '2023-08-11 14:06:52', + '2023-09-26 14:54:03', null); +INSERT INTO `dinky_sys_menu` +VALUES (4, -1, '认证中心', '/auth', null, 'auth', 'SafetyCertificateOutlined', 'M', 0, 79, '2023-08-11 14:06:52', + '2023-09-26 15:08:42', null); +INSERT INTO `dinky_sys_menu` +VALUES (5, -1, '数据开发', '/datastudio', './DataStudio', 'datastudio', 'CodeOutlined', 'C', 0, 4, + '2023-08-11 14:06:52', '2023-09-26 14:49:12', null); +INSERT INTO `dinky_sys_menu` +VALUES (6, -1, '配置中心', '/settings', null, 'settings', 'SettingOutlined', 'M', 0, 115, '2023-08-11 14:06:53', + '2023-09-26 15:16:03', null); +INSERT INTO `dinky_sys_menu` +VALUES (7, -1, '关于', '/about', './Other/About', 'about', 'SmileOutlined', 'C', 0, 143, '2023-08-11 14:06:53', + '2023-09-26 15:21:21', null); +INSERT INTO `dinky_sys_menu` +VALUES (8, -1, '监控', '/metrics', './Metrics', 'metrics', 'DashboardOutlined', 'C', 0, 140, '2023-08-11 14:06:53', + '2023-09-26 15:20:49', null); +INSERT INTO `dinky_sys_menu` +VALUES (9, 3, '集群', '/registration/cluster', null, 'registration:cluster', 'GoldOutlined', 'M', 0, 24, + '2023-08-11 14:06:54', '2023-09-26 14:54:19', null); +INSERT INTO `dinky_sys_menu` +VALUES (10, 3, '数据源', '/registration/datasource', './RegCenter/DataSource', 'registration:datasource', + 'DatabaseOutlined', 'M', 0, 37, '2023-08-11 14:06:54', '2023-09-26 14:59:31', null); +INSERT INTO `dinky_sys_menu` +VALUES (11, -1, '个人中心', '/account/center', './Other/PersonCenter', 'account:center', 'UserOutlined', 'C', 0, 144, + '2023-08-11 14:06:54', '2023-09-26 15:21:29', null); +INSERT INTO `dinky_sys_menu` +VALUES (12, 3, '告警', '/registration/alert', null, 'registration:alert', 'AlertOutlined', 'M', 0, 43, + '2023-08-11 14:06:54', '2023-09-26 15:01:32', null); +INSERT INTO `dinky_sys_menu` +VALUES (13, 3, '文档', '/registration/document', './RegCenter/Document', 'registration:document', 'BookOutlined', 'C', + 0, 55, '2023-08-11 14:06:54', '2023-09-26 15:03:59', null); +INSERT INTO `dinky_sys_menu` +VALUES (14, 3, '全局变量', '/registration/fragment', './RegCenter/GlobalVar', 'registration:fragment', 'RocketOutlined', + 'C', 0, 59, '2023-08-11 14:06:54', '2023-09-26 15:04:55', 
null); +INSERT INTO `dinky_sys_menu` +VALUES (15, 3, 'Git 项目', '/registration/gitproject', './RegCenter/GitProject', 'registration:gitproject', + 'GithubOutlined', 'C', 0, 63, '2023-08-11 14:06:54', '2023-09-26 15:05:37', null); +INSERT INTO `dinky_sys_menu` +VALUES (16, 3, 'UDF 模版', '/registration/udf', './RegCenter/UDF', 'registration:udf', 'ToolOutlined', 'C', 0, 69, + '2023-08-11 14:06:54', '2023-09-26 15:06:40', null); +INSERT INTO `dinky_sys_menu` +VALUES (17, 2, 'job-detail', '/devops/job-detail', './DevOps/JobDetail', 'devops:job-detail', 'InfoCircleOutlined', 'C', + 0, 22, '2023-08-11 14:06:54', '2023-09-26 14:53:53', null); +INSERT INTO `dinky_sys_menu` +VALUES (18, 2, 'job', '/devops/joblist', './DevOps', 'devops:joblist', 'AppstoreFilled', 'C', 0, 21, + '2023-08-11 14:06:54', '2023-09-26 14:53:43', null); +INSERT INTO `dinky_sys_menu` +VALUES (19, 3, '资源中心', '/registration/resource', './RegCenter/Resource', 'registration:resource', 'FileZipOutlined', + 'C', 0, 73, '2023-08-11 14:06:54', '2023-09-26 15:07:25', null); +INSERT INTO `dinky_sys_menu` +VALUES (20, 4, '角色', '/auth/role', './AuthCenter/Role', 'auth:role', 'TeamOutlined', 'C', 0, 88, + '2023-08-11 14:06:54', '2023-09-26 15:10:19', null); +INSERT INTO `dinky_sys_menu` +VALUES (21, 4, '用户', '/auth/user', './AuthCenter/User', 'auth:user', 'UserOutlined', 'C', 0, 80, + '2023-08-11 14:06:54', '2023-09-26 15:08:51', null); +INSERT INTO `dinky_sys_menu` +VALUES (22, 4, '菜单', '/auth/menu', './AuthCenter/Menu', 'auth:menu', 'MenuOutlined', 'C', 0, 94, + '2023-08-11 14:06:54', '2023-09-26 15:11:34', null); +INSERT INTO `dinky_sys_menu` +VALUES (23, 4, '租户', '/auth/tenant', './AuthCenter/Tenant', 'auth:tenant', 'SecurityScanOutlined', 'C', 0, 104, + '2023-08-11 14:06:54', '2023-09-26 15:13:35', null); +INSERT INTO `dinky_sys_menu` +VALUES (24, 6, '全局设置', '/settings/globalsetting', './SettingCenter/GlobalSetting', 'settings:globalsetting', + 'SettingOutlined', 'C', 0, 116, '2023-08-11 14:06:54', '2023-09-26 15:16:12', null); +INSERT INTO `dinky_sys_menu` +VALUES (25, 6, '系统日志', '/settings/systemlog', './SettingCenter/SystemLogs', 'settings:systemlog', + 'InfoCircleOutlined', 'C', 0, 131, '2023-08-11 14:06:55', '2023-09-26 15:18:53', null); +INSERT INTO `dinky_sys_menu` +VALUES (26, 6, '进程', '/settings/process', './SettingCenter/Process', 'settings:process', 'ReconciliationOutlined', + 'C', 0, 135, '2023-08-11 14:06:55', '2023-09-26 15:19:35', null); +INSERT INTO `dinky_sys_menu` +VALUES (27, 4, '行权限', '/auth/rowpermissions', './AuthCenter/RowPermissions', 'auth:rowpermissions', + 'SafetyCertificateOutlined', 'C', 0, 100, '2023-08-11 14:06:55', '2023-09-26 15:12:46', null); +INSERT INTO `dinky_sys_menu` +VALUES (28, 9, 'Flink 实例', '/registration/cluster/instance', './RegCenter/Cluster/Instance', + 'registration:cluster:instance', 'ReconciliationOutlined', 'C', 0, 25, '2023-08-11 14:06:55', + '2023-09-26 14:54:29', null); +INSERT INTO `dinky_sys_menu` +VALUES (29, 12, '告警组', '/registration/alert/group', './RegCenter/Alert/AlertGroup', 'registration:alert:group', + 'AlertOutlined', 'C', 0, 48, '2023-08-11 14:06:55', '2023-09-26 15:02:23', null); +INSERT INTO `dinky_sys_menu` +VALUES (30, 9, '集群配置', '/registration/cluster/config', './RegCenter/Cluster/Configuration', + 'registration:cluster:config', 'SettingOutlined', 'C', 0, 31, '2023-08-11 14:06:55', '2023-09-26 14:57:57', + null); +INSERT INTO `dinky_sys_menu` +VALUES (31, 12, '告警实例', '/registration/alert/instance', './RegCenter/Alert/AlertInstance', + 'registration:alert:instance', 
'AlertFilled', 'C', 0, 44, '2023-08-11 14:06:55', '2023-09-26 15:01:42', null); +INSERT INTO `dinky_sys_menu` +VALUES (32, 1, '作业监控', '/home/jobOverView', 'JobOverView', 'home:jobOverView', 'AntCloudOutlined', 'F', 0, 2, + '2023-08-15 16:52:59', '2023-09-26 14:48:50', null); +INSERT INTO `dinky_sys_menu` +VALUES (33, 1, '数据开发', '/home/devOverView', 'DevOverView', 'home:devOverView', 'AimOutlined', 'F', 0, 3, + '2023-08-15 16:54:47', '2023-09-26 14:49:00', null); +INSERT INTO `dinky_sys_menu` +VALUES (34, 5, '项目列表', '/datastudio/left/project', null, 'datastudio:left:project', 'ConsoleSqlOutlined', 'F', 0, 5, + '2023-09-01 18:00:39', '2023-09-26 14:49:31', null); +INSERT INTO `dinky_sys_menu` +VALUES (35, 5, '数据源', '/datastudio/left/datasource', null, 'datastudio:left:datasource', 'TableOutlined', 'F', 0, 7, + '2023-09-01 18:01:09', '2023-09-26 14:49:42', null); +INSERT INTO `dinky_sys_menu` +VALUES (36, 5, 'catalog', '/datastudio/left/catalog', null, 'datastudio:left:structure', 'DatabaseOutlined', 'F', 0, 6, + '2023-09-01 18:01:30', '2023-09-26 14:49:54', null); +INSERT INTO `dinky_sys_menu` +VALUES (37, 5, '作业配置', '/datastudio/right/jobConfig', null, 'datastudio:right:jobConfig', 'SettingOutlined', 'F', 0, + 8, '2023-09-01 18:02:15', '2023-09-26 14:50:24', null); +INSERT INTO `dinky_sys_menu` +VALUES (38, 5, '预览配置', '/datastudio/right/previewConfig', null, 'datastudio:right:previewConfig', + 'InsertRowRightOutlined', 'F', 0, 9, '2023-09-01 18:03:08', '2023-09-26 14:50:54', null); +INSERT INTO `dinky_sys_menu` +VALUES (39, 5, '版本历史', '/datastudio/right/historyVision', null, 'datastudio:right:historyVision', 'HistoryOutlined', + 'F', 0, 10, '2023-09-01 18:03:29', '2023-09-26 14:51:03', null); +INSERT INTO `dinky_sys_menu` +VALUES (40, 5, '保存点', '/datastudio/right/savePoint', null, 'datastudio:right:savePoint', 'FolderOutlined', 'F', 0, + 11, '2023-09-01 18:03:58', '2023-09-26 14:51:13', null); +INSERT INTO `dinky_sys_menu` +VALUES (41, 5, '作业信息', '/datastudio/right/jobInfo', null, 'datastudio:right:jobInfo', 'InfoCircleOutlined', 'F', 0, + 8, '2023-09-01 18:04:31', '2023-09-25 18:26:45', null); +INSERT INTO `dinky_sys_menu` +VALUES (42, 5, '控制台', '/datastudio/bottom/console', null, 'datastudio:bottom:console', 'ConsoleSqlOutlined', 'F', 0, + 12, '2023-09-01 18:04:56', '2023-09-26 14:51:24', null); +INSERT INTO `dinky_sys_menu` +VALUES (43, 5, '结果', '/datastudio/bottom/result', null, 'datastudio:bottom:result', 'SearchOutlined', 'F', 0, 13, + '2023-09-01 18:05:16', '2023-09-26 14:51:36', null); +INSERT INTO `dinky_sys_menu` +VALUES (44, 5, 'BI', '/datastudio/bottom/bi', null, 'datastudio:bottom:bi', 'DashboardOutlined', 'F', 0, 14, + '2023-09-01 18:05:43', '2023-09-26 14:51:45', null); +INSERT INTO `dinky_sys_menu` +VALUES (45, 5, '血缘', '/datastudio/bottom/lineage', null, 'datastudio:bottom:lineage', 'PushpinOutlined', 'F', 0, 15, + '2023-09-01 18:07:15', '2023-09-26 14:52:00', null); +INSERT INTO `dinky_sys_menu` +VALUES (46, 5, '表数据监控', '/datastudio/bottom/process', null, 'datastudio:bottom:process', 'TableOutlined', 'F', 0, + 16, '2023-09-01 18:07:55', '2023-09-26 14:52:38', null); +INSERT INTO `dinky_sys_menu` +VALUES (47, 5, '小工具', '/datastudio/bottom/tool', null, 'datastudio:bottom:tool', 'ToolOutlined', 'F', 0, 17, + '2023-09-01 18:08:18', '2023-09-26 14:53:04', null); +INSERT INTO `dinky_sys_menu` +VALUES (48, 28, '新建', '/registration/cluster/instance/add', null, 'registration:cluster:instance:add', 'PlusOutlined', + 'F', 0, 26, '2023-09-06 08:56:45', '2023-09-26 14:56:54', null); 
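+-- Illustrative sketch (not part of the seed data): the `dinky_sys_menu` rows in this script form a
+-- tree via `parent_id` (-1 marks a root), and a role's effective permission strings are the `perms`
+-- values it is granted through `dinky_sys_role_menu`. The query below is only an assumed usage
+-- example with no side effects; `user_id` = 1 refers to the admin user seeded earlier, and the
+-- result depends entirely on whichever role/menu grants are present at runtime.
+SELECT DISTINCT m.`perms`
+FROM `dinky_user_role` ur
+JOIN `dinky_sys_role_menu` rm ON rm.`role_id` = ur.`role_id`
+JOIN `dinky_sys_menu` m ON m.`id` = rm.`menu_id`
+WHERE ur.`user_id` = 1 AND m.`perms` IS NOT NULL;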
+INSERT INTO `dinky_sys_menu` +VALUES (49, 28, '回收', '/registration/cluster/instance/recovery', null, 'registration:cluster:instance:recovery', + 'DeleteFilled', 'F', 0, 29, '2023-09-06 08:57:30', '2023-09-26 14:56:54', null); +INSERT INTO `dinky_sys_menu` +VALUES (50, 28, '编辑', '/registration/cluster/instance/edit', null, 'registration:cluster:instance:edit', + 'EditOutlined', 'F', 0, 27, '2023-09-06 08:56:45', '2023-09-26 14:56:54', null); +INSERT INTO `dinky_sys_menu` +VALUES (51, 28, '删除', '/registration/cluster/instance/delete', null, 'registration:cluster:instance:delete', + 'DeleteOutlined', 'F', 0, 28, '2023-09-06 08:57:30', '2023-09-26 14:56:54', null); +INSERT INTO `dinky_sys_menu` +VALUES (52, 30, '新建', '/registration/cluster/config/add', null, 'registration:cluster:config:add', 'PlusOutlined', + 'F', 0, 32, '2023-09-06 09:00:31', '2023-09-26 14:58:50', null); +INSERT INTO `dinky_sys_menu` +VALUES (53, 30, '编辑', '/registration/cluster/config/edit', null, 'registration:cluster:config:edit', 'EditOutlined', + 'F', 0, 33, '2023-09-06 08:56:45', '2023-09-26 14:58:50', null); +INSERT INTO `dinky_sys_menu` +VALUES (54, 30, '删除', '/registration/cluster/config/delete', null, 'registration:cluster:config:delete', + 'DeleteOutlined', 'F', 0, 34, '2023-09-06 08:57:30', '2023-09-26 14:58:50', null); +INSERT INTO `dinky_sys_menu` +VALUES (55, 10, '新建', '/registration/datasource/add', null, 'registration:datasource:add', 'PlusOutlined', 'F', 0, 38, + '2023-09-06 09:01:05', '2023-09-26 15:00:42', null); +INSERT INTO `dinky_sys_menu` +VALUES (56, 10, '编辑', '/registration/datasource/edit', null, 'registration:datasource:edit', 'EditOutlined', 'F', 0, + 39, '2023-09-06 08:56:45', '2023-09-26 15:00:41', null); +INSERT INTO `dinky_sys_menu` +VALUES (57, 10, '删除', '/registration/datasource/delete', null, 'registration:datasource:delete', 'DeleteOutlined', + 'F', 0, 40, '2023-09-06 08:57:30', '2023-09-26 15:00:42', null); +INSERT INTO `dinky_sys_menu` +VALUES (58, 31, '新建', '/registration/alert/instance/add', null, 'registration:alert:instance:add', 'PlusOutlined', + 'F', 0, 46, '2023-09-06 09:01:05', '2023-09-26 15:02:04', null); +INSERT INTO `dinky_sys_menu` +VALUES (59, 31, '编辑', '/registration/alert/instance/edit', null, 'registration:alert:instance:edit', 'EditOutlined', + 'F', 0, 45, '2023-09-06 08:56:45', '2023-09-26 15:01:54', null); +INSERT INTO `dinky_sys_menu` +VALUES (60, 31, '删除', '/registration/alert/instance/delete', null, 'registration:alert:instance:delete', + 'DeleteOutlined', 'F', 0, 47, '2023-09-06 08:57:30', '2023-09-26 15:02:13', null); +INSERT INTO `dinky_sys_menu` +VALUES (61, 29, '新建', '/registration/alert/group/add', null, 'registration:alert:group:add', 'PlusOutlined', 'F', 0, + 49, '2023-09-06 09:01:05', '2023-09-26 15:02:48', null); +INSERT INTO `dinky_sys_menu` +VALUES (62, 29, '编辑', '/registration/alert/group/edit', null, 'registration:alert:group:edit', 'EditOutlined', 'F', 0, + 49, '2023-09-06 08:56:45', '2023-09-26 15:02:36', null); +INSERT INTO `dinky_sys_menu` +VALUES (63, 29, '删除', '/registration/alert/group/delete', null, 'registration:alert:group:delete', 'DeleteOutlined', + 'F', 0, 50, '2023-09-06 08:57:30', '2023-09-26 15:03:01', null); +INSERT INTO `dinky_sys_menu` +VALUES (64, 13, '新建', '/registration/document/add', null, 'registration:document:add', 'PlusOutlined', 'F', 0, 57, + '2023-09-06 09:01:05', '2023-09-26 15:04:22', null); +INSERT INTO `dinky_sys_menu` +VALUES (65, 13, '编辑', '/registration/document/edit', null, 'registration:document:edit', 
'EditOutlined', 'F', 0, 56, + '2023-09-06 08:56:45', '2023-09-26 15:04:13', null); +INSERT INTO `dinky_sys_menu` +VALUES (66, 13, '删除', '/registration/document/delete', null, 'registration:document:delete', 'DeleteOutlined', 'F', 0, + 58, '2023-09-06 08:57:30', '2023-09-26 15:04:32', null); +INSERT INTO `dinky_sys_menu` +VALUES (68, 14, '新建', '/registration/fragment/add', null, 'registration:fragment:add', 'PlusOutlined', 'F', 0, 61, + '2023-09-06 09:01:05', '2023-09-26 15:05:13', null); +INSERT INTO `dinky_sys_menu` +VALUES (69, 14, '编辑', '/registration/fragment/edit', null, 'registration:fragment:edit', 'EditOutlined', 'F', 0, 60, + '2023-09-06 08:56:45', '2023-09-26 15:05:04', null); +INSERT INTO `dinky_sys_menu` +VALUES (70, 14, '删除', '/registration/fragment/delete', null, 'registration:fragment:delete', 'DeleteOutlined', 'F', 0, + 62, '2023-09-06 08:57:30', '2023-09-26 15:05:21', null); +INSERT INTO `dinky_sys_menu` +VALUES (72, 15, '新建', '/registration/gitproject/add', null, 'registration:gitproject:add', 'PlusOutlined', 'F', 0, 65, + '2023-09-06 09:01:05', '2023-09-26 15:06:01', null); +INSERT INTO `dinky_sys_menu` +VALUES (73, 15, '编辑', '/registration/gitproject/edit', null, 'registration:gitproject:edit', 'EditOutlined', 'F', 0, + 64, '2023-09-06 08:56:45', '2023-09-26 15:05:52', null); +INSERT INTO `dinky_sys_menu` +VALUES (74, 15, '删除', '/registration/gitproject/delete', null, 'registration:gitproject:delete', 'DeleteOutlined', + 'F', 0, 66, '2023-09-06 08:57:30', '2023-09-26 15:06:09', null); +INSERT INTO `dinky_sys_menu` +VALUES (76, 15, '构建', '/registration/gitproject/build', null, 'registration:gitproject:build', 'PlaySquareOutlined', + 'F', 0, 67, '2023-09-06 08:57:30', '2023-09-26 15:06:17', null); +INSERT INTO `dinky_sys_menu` +VALUES (77, 15, '查看日志', '/registration/gitproject/showLog', null, 'registration:gitproject:showLog', + 'SearchOutlined', 'F', 0, 68, '2023-09-06 08:57:30', '2023-09-26 15:06:26', null); +INSERT INTO `dinky_sys_menu` +VALUES (78, 16, '新建', '/registration/udf/template/add', null, 'registration:udf:template:add', 'PlusOutlined', 'F', 0, + 71, '2023-09-06 09:01:05', '2023-09-26 15:07:04', null); +INSERT INTO `dinky_sys_menu` +VALUES (79, 16, '编辑', '/registration/udf/template/edit', null, 'registration:udf:template:edit', 'EditOutlined', 'F', + 0, 70, '2023-09-06 08:56:45', '2023-09-26 15:06:48', null); +INSERT INTO `dinky_sys_menu` +VALUES (80, 16, '删除', '/registration/udf/template/delete', null, 'registration:udf:template:delete', 'DeleteOutlined', + 'F', 0, 72, '2023-09-06 08:57:30', '2023-09-26 15:07:12', null); +INSERT INTO `dinky_sys_menu` +VALUES (82, 19, '上传', '/registration/resource/upload', null, 'registration:resource:upload', 'PlusOutlined', 'F', 0, + 77, '2023-09-06 09:01:05', '2023-09-26 15:08:02', null); +INSERT INTO `dinky_sys_menu` +VALUES (83, 19, '重命名', '/registration/resource/rename', null, 'registration:resource:rename', 'EditOutlined', 'F', 0, + 75, '2023-09-06 08:56:45', '2023-09-26 15:07:45', null); +INSERT INTO `dinky_sys_menu` +VALUES (84, 19, '删除', '/registration/resource/delete', null, 'registration:resource:delete', 'DeleteOutlined', 'F', 0, + 76, '2023-09-06 08:57:30', '2023-09-26 15:07:54', null); +INSERT INTO `dinky_sys_menu` +VALUES (85, 19, '创建文件夹', '/registration/resource/addFolder', null, 'registration:resource:addFolder', + 'PlusOutlined', 'F', 0, 74, '2023-09-06 08:57:30', '2023-09-26 15:07:37', null); +INSERT INTO `dinky_sys_menu` +VALUES (86, 4, 'Token 令牌', '/auth/token', './AuthCenter/Token', 'auth:token', 
'SecurityScanFilled', 'C', 0, 111, + '2023-09-05 23:14:23', '2023-09-26 15:15:22', null); +INSERT INTO `dinky_sys_menu` +VALUES (87, 21, '添加', '/auth/user/add', null, 'auth:user:add', 'PlusOutlined', 'F', 0, 81, '2023-09-22 22:06:52', + '2023-09-26 15:09:49', null); +INSERT INTO `dinky_sys_menu` +VALUES (88, 21, '重置密码', '/auth/user/reset', null, 'auth:user:reset', 'RollbackOutlined', 'F', 0, 84, + '2023-09-22 22:08:17', '2023-09-26 15:09:49', null); +INSERT INTO `dinky_sys_menu` +VALUES (89, 21, '恢复用户', '/auth/user/recovery', null, 'auth:user:recovery', 'RadiusSettingOutlined', 'F', 0, 85, + '2023-09-22 22:08:53', '2023-09-26 15:09:49', null); +INSERT INTO `dinky_sys_menu` +VALUES (90, 21, '删除', '/auth/user/delete', null, 'auth:user:delete', 'DeleteOutlined', 'F', 0, 83, + '2023-09-22 22:09:29', '2023-09-26 15:09:49', null); +INSERT INTO `dinky_sys_menu` +VALUES (91, 21, '修改密码', '/auth/user/changePassword', null, 'auth:user:changePassword', 'EditOutlined', 'F', 0, 86, + '2023-09-22 22:10:01', '2023-09-26 15:09:49', null); +INSERT INTO `dinky_sys_menu` +VALUES (92, 21, '分配角色', '/auth/user/assignRole', null, 'auth:user:assignRole', 'ForwardOutlined', 'F', 0, 87, + '2023-09-22 22:10:31', '2023-09-26 15:09:49', null); +INSERT INTO `dinky_sys_menu` +VALUES (93, 21, '编辑', '/auth/user/edit', null, 'auth:user:edit', 'EditOutlined', 'F', 0, 82, '2023-09-22 22:11:41', + '2023-09-26 15:09:49', null); +INSERT INTO `dinky_sys_menu` +VALUES (94, 20, '添加', '/auth/role/add', null, 'auth:role:add', 'PlusOutlined', 'F', 0, 89, '2023-09-22 22:06:52', + '2023-09-26 15:11:10', null); +INSERT INTO `dinky_sys_menu` +VALUES (95, 20, '删除', '/auth/role/delete', null, 'auth:role:delete', 'DeleteOutlined', 'F', 0, 91, + '2023-09-22 22:09:29', '2023-09-26 15:11:10', null); +INSERT INTO `dinky_sys_menu` +VALUES (96, 20, '分配菜单', '/auth/role/assignMenu', null, 'auth:role:assignMenu', 'AntDesignOutlined', 'F', 0, 92, + '2023-09-22 22:10:31', '2023-09-26 15:11:10', null); +INSERT INTO `dinky_sys_menu` +VALUES (97, 20, '编辑', '/auth/role/edit', null, 'auth:role:edit', 'EditOutlined', 'F', 0, 90, '2023-09-22 22:11:41', + '2023-09-26 15:11:10', null); +INSERT INTO `dinky_sys_menu` +VALUES (98, 20, '查看用户列表', '/auth/role/viewUser', null, 'auth:role:viewUser', 'FundViewOutlined', 'F', 0, 93, + '2023-09-22 22:11:41', '2023-09-26 15:11:10', null); +INSERT INTO `dinky_sys_menu` +VALUES (99, 86, '添加 Token', '/auth/token/add', null, 'auth:token:add', 'PlusOutlined', 'F', 0, 112, + '2023-09-22 22:11:41', '2023-09-26 15:15:46', null); +INSERT INTO `dinky_sys_menu` +VALUES (100, 86, '删除 Token', '/auth/token/delete', null, 'auth:token:delete', 'DeleteOutlined', 'F', 0, 114, + '2023-09-22 22:11:41', '2023-09-26 15:15:46', null); +INSERT INTO `dinky_sys_menu` +VALUES (101, 86, '修改 Token', '/auth/token/edit', null, 'auth:token:edit', 'EditOutlined', 'F', 0, 113, + '2023-09-22 22:11:41', '2023-09-26 15:15:46', null); +INSERT INTO `dinky_sys_menu` +VALUES (102, 27, '添加', '/auth/rowPermissions/add', null, 'auth:rowPermissions:add', 'PlusOutlined', 'F', 0, 101, + '2023-09-22 22:11:41', '2023-09-26 15:13:12', null); +INSERT INTO `dinky_sys_menu` +VALUES (103, 27, '编辑', '/auth/rowPermissions/edit', null, 'auth:rowPermissions:edit', 'EditOutlined', 'F', 0, 102, + '2023-09-22 22:11:41', '2023-09-26 15:13:12', null); +INSERT INTO `dinky_sys_menu` +VALUES (104, 27, '删除', '/auth/rowPermissions/delete', null, 'auth:rowPermissions:delete', 'DeleteOutlined', 'F', 0, + 103, '2023-09-22 22:11:41', '2023-09-26 15:13:12', null); +INSERT INTO `dinky_sys_menu` 
+VALUES (105, 23, '添加', '/auth/tenant/add', null, 'auth:tenant:add', 'PlusOutlined', 'F', 0, 105, + '2023-09-22 22:11:41', '2023-09-26 15:15:02', null); +INSERT INTO `dinky_sys_menu` +VALUES (106, 23, '编辑', '/auth/tenant/edit', null, 'auth:tenant:edit', 'EditOutlined', 'F', 0, 106, + '2023-09-22 22:11:41', '2023-09-26 15:15:02', null); +INSERT INTO `dinky_sys_menu` +VALUES (107, 23, '删除', '/auth/tenant/delete', null, 'auth:tenant:delete', 'DeleteOutlined', 'F', 0, 107, + '2023-09-22 22:11:41', '2023-09-26 15:15:02', null); +INSERT INTO `dinky_sys_menu` +VALUES (108, 23, '分配用户', '/auth/tenant/assignUser', null, 'auth:tenant:assignUser', 'EuroOutlined', 'F', 0, 108, + '2023-09-22 22:11:41', '2023-09-26 15:15:02', null); +INSERT INTO `dinky_sys_menu` +VALUES (109, 23, '查看用户', '/auth/tenant/viewUser', null, 'auth:tenant:viewUser', 'FundViewOutlined', 'F', 0, 109, + '2023-09-22 22:11:41', '2023-09-26 15:15:02', null); +INSERT INTO `dinky_sys_menu` +VALUES (110, 23, '设置/取消租户管理员', '/auth/tenant/modifyTenantManager', null, 'auth:tenant:modifyTenantManager', + 'ExclamationCircleOutlined', 'F', 0, 110, '2023-09-22 22:11:41', '2023-09-26 15:15:02', null); +INSERT INTO `dinky_sys_menu` +VALUES (111, 22, '创建根菜单', '/auth/menu/createRoot', null, 'auth:menu:createRoot', 'FolderAddOutlined', 'F', 0, 95, + '2023-09-22 22:11:41', '2023-09-26 15:12:26', null); +INSERT INTO `dinky_sys_menu` +VALUES (112, 22, '刷新', '/auth/menu/refresh', null, 'auth:menu:refresh', 'ReloadOutlined', 'F', 0, 97, + '2023-09-22 22:11:41', '2023-09-26 15:12:26', null); +INSERT INTO `dinky_sys_menu` +VALUES (113, 22, '编辑', '/auth/menu/edit', null, 'auth:menu:edit', 'EditOutlined', 'F', 0, 98, '2023-09-22 22:11:41', + '2023-09-26 15:12:26', null); +INSERT INTO `dinky_sys_menu` +VALUES (114, 22, '添加子项', '/auth/menu/addSub', null, 'auth:menu:addSub', 'PlusOutlined', 'F', 0, 96, + '2023-09-22 22:11:41', '2023-09-26 15:12:26', null); +INSERT INTO `dinky_sys_menu` +VALUES (115, 22, '删除', '/auth/menu/delete', null, 'auth:menu:delete', 'DeleteOutlined', 'F', 0, 99, + '2023-09-22 22:11:41', '2023-09-26 15:12:26', null); +INSERT INTO `dinky_sys_menu` +VALUES (116, 6, '告警策略', '/settings/alertrule', './SettingCenter/AlertRule', 'settings:alertrule', 'AndroidOutlined', + 'C', 0, 136, '2023-09-22 23:31:10', '2023-09-26 15:19:52', null); +INSERT INTO `dinky_sys_menu` +VALUES (117, 116, '添加', '/settings/alertrule/add', null, 'settings:alertrule:add', 'PlusOutlined', 'F', 0, 137, + '2023-09-22 23:34:51', '2023-09-26 15:20:03', null); +INSERT INTO `dinky_sys_menu` +VALUES (118, 116, '删除', '/settings/alertrule/delete', null, 'settings:alertrule:delete', 'DeleteOutlined', 'F', 0, + 139, '2023-09-22 23:35:20', '2023-09-26 15:20:21', null); +INSERT INTO `dinky_sys_menu` +VALUES (119, 116, '编辑', '/settings/alertrule/edit', null, 'settings:alertrule:edit', 'EditOutlined', 'F', 0, 138, + '2023-09-22 23:36:32', '2023-09-26 15:20:13', null); +INSERT INTO `dinky_sys_menu` +VALUES (120, 8, 'Dinky 服务监控', '/metrics/server', './Metrics/Server', 'metrics:server', 'DashboardOutlined', 'F', 0, + 141, '2023-09-22 23:37:43', '2023-09-26 15:21:00', null); +INSERT INTO `dinky_sys_menu` +VALUES (121, 8, 'Flink 任务监控', '/metrics/job', './Metrics/Job', 'metrics:job', 'DashboardTwoTone', 'C', 0, 142, + '2023-09-22 23:38:34', '2023-09-26 15:21:08', null); +INSERT INTO `dinky_sys_menu` +VALUES (122, 24, 'Dinky 环境配置', '/settings/globalsetting/dinky', null, 'settings:globalsetting:dinky', + 'SettingOutlined', 'C', 0, 117, '2023-09-22 23:40:30', '2023-09-26 15:16:20', null); +INSERT 
INTO `dinky_sys_menu` +VALUES (123, 24, 'Flink 环境配置', '/settings/globalsetting/flink', null, 'settings:globalsetting:flink', + 'SettingOutlined', 'C', 0, 119, '2023-09-22 23:40:30', '2023-09-26 15:16:40', null); +INSERT INTO `dinky_sys_menu` +VALUES (124, 24, 'Maven 配置', '/settings/globalsetting/maven', null, 'settings:globalsetting:maven', 'SettingOutlined', + 'C', 0, 121, '2023-09-22 23:40:30', '2023-09-26 15:17:04', null); +INSERT INTO `dinky_sys_menu` +VALUES (125, 24, 'DolphinScheduler 配置', '/settings/globalsetting/ds', null, 'settings:globalsetting:ds', + 'SettingOutlined', 'C', 0, 123, '2023-09-22 23:40:30', '2023-09-26 15:17:23', null); +INSERT INTO `dinky_sys_menu` +VALUES (126, 24, 'LDAP 配置', '/settings/globalsetting/ldap', null, 'settings:globalsetting:ldap', 'SettingOutlined', + 'C', 0, 125, '2023-09-22 23:40:30', '2023-09-26 15:17:41', null); +INSERT INTO `dinky_sys_menu` +VALUES (127, 24, 'Metrics 配置', '/settings/globalsetting/metrics', null, 'settings:globalsetting:metrics', + 'SettingOutlined', 'C', 0, 127, '2023-09-22 23:40:30', '2023-09-26 15:18:06', null); +INSERT INTO `dinky_sys_menu` +VALUES (128, 24, 'Resource 配置', '/settings/globalsetting/resource', null, 'settings:globalsetting:resource', + 'SettingOutlined', 'C', 0, 129, '2023-09-22 23:40:30', '2023-09-26 15:18:27', null); +INSERT INTO `dinky_sys_menu` +VALUES (129, 122, '编辑', '/settings/globalsetting/dinky/edit', null, 'settings:globalsetting:dinky:edit', + 'EditOutlined', 'F', 0, 118, '2023-09-22 23:44:18', '2023-09-26 15:16:29', null); +INSERT INTO `dinky_sys_menu` +VALUES (130, 123, '编辑', '/settings/globalsetting/flink/edit', null, 'settings:globalsetting:flink:edit', + 'EditOutlined', 'F', 0, 120, '2023-09-22 23:44:18', '2023-09-26 15:16:50', null); +INSERT INTO `dinky_sys_menu` +VALUES (131, 124, '编辑', '/settings/globalsetting/maven/edit', null, 'settings:globalsetting:maven:edit', + 'EditOutlined', 'F', 0, 122, '2023-09-22 23:44:18', '2023-09-26 15:17:13', null); +INSERT INTO `dinky_sys_menu` +VALUES (132, 125, '编辑', '/settings/globalsetting/ds/edit', null, 'settings:globalsetting:ds:edit', 'EditOutlined', + 'F', 0, 124, '2023-09-22 23:44:18', '2023-09-26 15:17:32', null); +INSERT INTO `dinky_sys_menu` +VALUES (133, 126, '编辑', '/settings/globalsetting/ldap/edit', null, 'settings:globalsetting:ldap:edit', 'EditOutlined', + 'F', 0, 126, '2023-09-22 23:44:18', '2023-09-26 15:17:51', null); +INSERT INTO `dinky_sys_menu` +VALUES (134, 127, '编辑', '/settings/globalsetting/metrics/edit', null, 'settings:globalsetting:metrics:edit', + 'EditOutlined', 'F', 0, 128, '2023-09-22 23:44:18', '2023-09-26 15:18:16', null); +INSERT INTO `dinky_sys_menu` +VALUES (135, 128, '编辑', '/settings/globalsetting/resource/edit', null, 'settings:globalsetting:resource:edit', + 'EditOutlined', 'F', 0, 130, '2023-09-22 23:44:18', '2023-09-26 15:18:39', null); +INSERT INTO `dinky_sys_menu` +VALUES (136, 12, '告警模版', '/registration/alert/template', './RegCenter/Alert/AlertTemplate', + 'registration:alert:template', 'AlertOutlined', 'C', 0, 51, '2023-09-23 21:34:43', '2023-09-26 15:03:14', null); +INSERT INTO `dinky_sys_menu` +VALUES (137, 136, '添加', '/registration/alert/template/add', null, 'registration:alert:template:add', 'PlusOutlined', + 'F', 0, 52, '2023-09-23 21:36:37', '2023-09-26 15:03:22', null); +INSERT INTO `dinky_sys_menu` +VALUES (138, 136, '编辑', '/registration/alert/template/edit', null, 'registration:alert:template:edit', 'EditOutlined', + 'F', 0, 53, '2023-09-23 21:37:00', '2023-09-26 15:03:30', null); +INSERT INTO 
`dinky_sys_menu` +VALUES (139, 136, '删除', '/registration/alert/template/delete', null, 'registration:alert:template:delete', + 'DeleteOutlined', 'F', 0, 54, '2023-09-23 21:37:43', '2023-09-26 15:03:37', null); +INSERT INTO `dinky_sys_menu` +VALUES (140, 25, '系统日志', '/settings/systemlog/rootlog', null, 'settings:systemlog:rootlog', 'BankOutlined', 'F', 0, + 133, '2023-09-23 21:43:57', '2023-09-26 15:19:14', null); +INSERT INTO `dinky_sys_menu` +VALUES (141, 25, '日志列表', '/settings/systemlog/loglist', null, 'settings:systemlog:loglist', 'BankOutlined', 'F', 0, + 134, '2023-09-23 21:45:05', '2023-09-26 15:19:23', null); +INSERT INTO `dinky_sys_menu` +VALUES (142, 30, '部署 Session 集群', '/registration/cluster/config/deploy', null, 'registration:cluster:config:deploy', + 'PlayCircleOutlined', 'F', 0, 35, '2023-09-26 13:42:55', '2023-09-26 14:58:50', null); +INSERT INTO `dinky_sys_menu` +VALUES (143, 30, ' 心跳检测', '/registration/cluster/config/heartbeat', null, 'registration:cluster:config:heartbeat', + 'HeartOutlined', 'F', 0, 36, '2023-09-26 13:44:23', '2023-09-26 14:58:50', null); +INSERT INTO `dinky_sys_menu` +VALUES (144, 28, '心跳检测', '/registration/cluster/instance/heartbeat', null, + 'registration:cluster:instance:heartbeat', 'HeartOutlined', 'F', 0, 30, '2023-09-26 13:51:04', + '2023-09-26 14:57:42', null); +INSERT INTO `dinky_sys_menu` +VALUES (145, 10, '心跳检测', '/registration/datasource/heartbeat', null, 'registration:datasource:heartbeat', + 'HeartOutlined', 'F', 0, 41, '2023-09-26 14:00:06', '2023-09-26 15:00:42', null); +INSERT INTO `dinky_sys_menu` +VALUES (146, 10, ' 拷贝', '/registration/datasource/copy', null, 'registration:datasource:copy', 'CopyOutlined', 'F', 0, + 42, '2023-09-26 14:02:28', '2023-09-26 15:00:41', null); + +-- ---------------------------- +-- Records of dinky_alert_rule +-- ---------------------------- +INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, + create_time, update_time) +VALUES (3, 'alert.rule.jobFail', + '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"''FAILED''","rulePriority":"1"}]', 1, + 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-04 23:03:02'); +INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, + create_time, update_time) +VALUES (4, 'alert.rule.getJobInfoFail', + '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"''UNKNOWN''","rulePriority":"1"}]', 1, + 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-05 18:03:43'); +INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, + create_time, update_time) +VALUES (5, 'alert.rule.jobRestart', + '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"''RESTARTING''","rulePriority":"1"}]', 1, + 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:35:12'); +INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, + create_time, update_time) +VALUES (6, 'alert.rule.checkpointFail', + '[{"ruleKey":"checkpointRule.checkFailed(#key,#checkPoints)","ruleOperator":"EQ","ruleValue":"true"}]', 1, + 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:49:03'); +INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, + create_time, update_time) +VALUES (7, 'alert.rule.jobRunException', + 
'[{"ruleKey":"exceptionRule.isException(#key,#exceptions)","ruleOperator":"EQ","ruleValue":"true"}]', 1, + 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:50:12'); +INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, + create_time, update_time) +VALUES (8, 'alert.rule.checkpointTimeout', + '[{"ruleKey":"checkpointRule.checkpointTime(#key,#checkPoints)","ruleOperator":"GE","ruleValue":"1000"}]', 1, + 'CUSTOM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 22:23:35'); + +INSERT INTO dinky_alert_template +VALUES (1, 'Default', ' +- **Job Name :** ${task.name} +- **Job Status :** ${jobInstance.status} +- **Alert Time :** ${time} +- **Start Time :** ${startTime} +- **End Time :** ${endTime} +- **${(exceptions.rootException)?substring(0,20)}** +[Go toTask Web](http://${taskUrl}) +', 1, null, null); + +INSERT INTO `dinky_udf_template` +VALUES ( 1, 'java_udf', 'Java', 'UDF', '${(package=='''')?string('''',''package ''+package+'';'')} + +import org.apache.flink.table.functions.ScalarFunction; + +public class ${className} extends ScalarFunction { + public String eval(String s) { + return null; + } +}' + , 1, '2022-10-19 09:17:37', '2022-10-25 17:45:57'); +INSERT INTO `dinky_udf_template` +VALUES ( 2, 'java_udtf', 'Java', 'UDTF', '${(package=='''')?string('''',''package ''+package+'';'')} + +import org.apache.flink.table.functions.ScalarFunction; + +@FunctionHint(output = @DataTypeHint("ROW")) +public static class ${className} extends TableFunction { + + public void eval(String str) { + for (String s : str.split(" ")) { + // use collect(...) to emit a row + collect(Row.of(s, s.length())); + } + } +}' + , 1, '2022-10-19 09:22:58', '2022-10-25 17:49:30'); +INSERT INTO `dinky_udf_template` +VALUES ( 3, 'scala_udf', 'Scala', 'UDF', '${(package=='''')?string('''',''package ''+package+'';'')} + +import org.apache.flink.table.api._ +import org.apache.flink.table.functions.ScalarFunction + +// 定义可参数化的函数逻辑 +class ${className} extends ScalarFunction { + def eval(s: String, begin: Integer, end: Integer): String = { + "this is scala" + } +}' + , 1, '2022-10-25 09:21:32', '2022-10-25 17:49:46'); +INSERT INTO `dinky_udf_template` +VALUES ( 4, 'python_udf_1', 'Python', 'UDF', 'from pyflink.table import ScalarFunction, DataTypes +from pyflink.table.udf import udf + +class ${className}(ScalarFunction): + def __init__(self): + pass + + def eval(self, variable): + return str(variable) + + +${attr!''f''} = udf(${className}(), result_type=DataTypes.STRING())' + , 1, '2022-10-25 09:23:07', '2022-10-25 09:34:01'); +INSERT INTO `dinky_udf_template` +VALUES ( 5, 'python_udf_2', 'Python', 'UDF', 'from pyflink.table import DataTypes +from pyflink.table.udf import udf + +@udf(result_type=DataTypes.STRING()) +def ${className}(variable1:str): + return ''''' + , 1, '2022-10-25 09:25:13', '2022-10-25 09:34:47'); + +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 1, 'Variable', '优化参数', 'Batch/Streaming', 'set table.exec.async-lookup.buffer-capacity' + , '异步查找连接可以触发的最大异步操作的操作数。 +The max number of async i/o operation that the async lookup join can trigger.' 
+ , 'Set ''table.exec.async-lookup.buffer-capacity''=''100'';', '1.14', 0, 1 + , '2022-01-20 15:00:00', '2022-01-20 15:00:00'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 2, 'Variable', '优化参数', 'Batch/Streaming', 'set table.exec.async-lookup.timeout' + , '异步操作完成的超时时间。 +The async timeout for the asynchronous operation to complete.', 'Set ''table.exec.async-lookup.timeout''=''3 min'';' + , '1.14', 0, 1 + , '2022-01-20 15:00:00', '2022-01-20 15:00:00'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 3, 'Variable', '优化参数', 'Batch', 'set table.exec.disabled-operators' + , '禁用指定operators,用逗号分隔 +Mainly for testing. A comma-separated list of operator names, each name represents a kind of disabled operator. Operators that can be disabled include "NestedLoopJoin", "ShuffleHashJoin", "BroadcastHashJoin", "SortMergeJoin", "HashAgg", "SortAgg". By default no operator is disabled.' + , 'Set ''table.exec.disabled-operators''=''SortMergeJoin'';', '1.14', 0, 1 + , '2022-01-20 15:00:00', '2022-01-20 15:00:00'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 4, 'Variable', '优化参数', 'Streaming', 'set table.exec.mini-batch.allow-latency' + , '最大等待时间可用于MiniBatch缓冲输入记录。 MiniBatch是用于缓冲输入记录以减少状态访问的优化。MiniBatch以允许的等待时间间隔以及达到最大缓冲记录数触发。注意:如果将table.exec.mini-batch.enabled设置为true,则其值必须大于零.' + , 'Set ''table.exec.mini-batch.allow-latency''=''-1 ms'';', '1.14', 0, 1 + , '2022-01-20 15:00:00', '2022-01-20 15:00:00'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 5, 'Variable', '优化参数', 'Streaming', 'set table.exec.mini-batch.enabled' + , '指定是否启用MiniBatch优化。 MiniBatch是用于缓冲输入记录以减少状态访问的优化。默认情况下禁用此功能。 要启用此功能,用户应将此配置设置为true。注意:如果启用了mini batch 处理,则必须设置“ table.exec.mini-batch.allow-latency”和“ table.exec.mini-batch.size”.' + , 'Set ''table.exec.mini-batch.enabled''=''false'';', '1.14', 0, 1 + , '2022-01-20 15:00:00', '2022-01-20 15:00:00'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 6, 'Variable', '优化参数', 'Streaming', 'set table.exec.mini-batch.size' + , '可以为MiniBatch缓冲最大输入记录数。 MiniBatch是用于缓冲输入记录以减少状态访问的优化。MiniBatch以允许的等待时间间隔以及达到最大缓冲记录数触发。 注意:MiniBatch当前仅适用于非窗口聚合。如果将table.exec.mini-batch.enabled设置为true,则其值必须为正.' + , 'Set ''table.exec.mini-batch.size''=''-1'';', '1.14', 0, 1 + , '2022-01-20 15:00:00', '2022-01-20 15:00:00'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 7, 'Variable', '优化参数', 'Batch/Streaming', 'set table.exec.resource.default-parallelism' + , '设置所有Operator的默认并行度。 +Sets default parallelism for all operators (such as aggregate, join, filter) to run with parallel instances. This config has a higher priority than parallelism of StreamExecutionEnvironment (actually, this config overrides the parallelism of StreamExecutionEnvironment). 
A value of -1 indicates that no default parallelism is set, then it will fallback to use the parallelism of StreamExecutionEnvironment.' + , 'Set ''table.exec.resource.default-parallelism''=''1'';', '1.14', 0, 1 + , '2022-01-20 15:00:00', '2022-01-20 15:00:00'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 8, 'Variable', '优化参数', 'Batch/Streaming', 'set table.exec.sink.not-null-enforcer' + , '对表的NOT NULL列约束强制执行不能将空值插入到表中。Flink支持“error”(默认)和“drop”强制行为 +The NOT NULL column constraint on a table enforces that null values can''t be inserted into the table. Flink supports ''error'' (default) and ''drop'' enforcement behavior. By default, Flink will check values and throw runtime exception when null values writing into NOT NULL columns. Users can change the behavior to ''drop'' to silently drop such records without throwing exception. +Possible values: +"ERROR" +"DROP"', 'Set ''table.exec.sink.not-null-enforcer''=''ERROR'';', '1.14', 0, 1 + , '2022-01-20 15:00:00', '2022-01-20 15:00:00'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 9, 'Variable', '优化参数', 'Streaming', 'set table.exec.sink.upsert-materialize' + , '由于分布式系统中 Shuffle 导致 ChangeLog 数据混乱,Sink 接收到的数据可能不是全局 upsert 的顺序。因此,在 upsert sink 之前添加 upsert materialize 运算符。它接收上游的变更日志记录并为下游生成一个 upsert 视图。默认情况下,当唯一键出现分布式无序时,会添加具体化操作符。您也可以选择不实现(NONE)或强制实现(FORCE)。 +Possible values: +"NONE" +"FORCE" +"AUTO"', 'Set ''table.exec.sink.upsert-materialize''=''AUTO'';', '1.14', 0, 1 + , '2022-01-20 15:00:00', '2022-01-20 15:00:00'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 10, 'Module', '建表语句', 'Other', 'create.table.kafka' + , 'kafka快速建表格式', 'CREATE TABLE Kafka_Table ( + `event_time` TIMESTAMP(3) METADATA FROM ''timestamp'', + `partition` BIGINT METADATA VIRTUAL, + `offset` BIGINT METADATA VIRTUAL, + `user_id` BIGINT, + `item_id` BIGINT, + `behavior` STRING +) WITH ( + ''connector'' = ''kafka'', + ''topic'' = ''user_behavior'', + ''properties.bootstrap.servers'' = ''localhost:9092'', + ''properties.group.id'' = ''testGroup'', + ''scan.startup.mode'' = ''earliest-offset'', + ''format'' = ''csv'' +); +// --可选: ''value.fields-include'' = ''ALL'', +// --可选: ''json.ignore-parse-errors'' = ''true'', +// --可选: ''key.fields-prefix'' = ''k_'',', '1.14', 0, 1 + , '2022-01-20 16:59:18', '2022-01-20 17:57:32'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 11, 'Module', '建表语句', 'Other', 'create.table.doris' + , 'Doris快速建表', 'CREATE TABLE doris_table ( + cid INT, + sid INT, + name STRING, + cls STRING, + score INT, + PRIMARY KEY (cid) NOT ENFORCED +) WITH ( +''connector'' = ''doris'', +''fenodes'' = ''127.0.0.1:8030'' , +''table.identifier'' = ''test.scoreinfo'', +''username'' = ''root'', +''password''='''' +);', '1.14', 0, 1 + , '2022-01-20 17:08:00', '2022-01-20 17:57:26'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 12, 'Module', '建表语句', 
'Other', 'create.table.jdbc' + , 'JDBC建表语句', 'CREATE TABLE JDBC_table ( + id BIGINT, + name STRING, + age INT, + status BOOLEAN, + PRIMARY KEY (id) NOT ENFORCED +) WITH ( + ''connector'' = ''jdbc'', + ''url'' = ''jdbc:mysql://localhost:3306/mydatabase'', + ''table-name'' = ''users'', + ''username'' = ''root'', + ''password'' = ''123456'' +); +// --可选: ''sink.parallelism''=''1'', +// --可选: ''lookup.cache.ttl''=''1000s'',', '1.14', 0, 1 + , '2022-01-20 17:15:26', '2022-01-20 17:57:20'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 13, 'Module', 'CataLog', 'Other', 'create.catalog.hive' + , '创建HIVE的catalog', 'CREATE CATALOG hive WITH ( + ''type'' = ''hive'', + ''default-database'' = ''default'', + ''hive-conf-dir'' = ''/app/wwwroot/MBDC/hive/conf/'', // --hive配置文件 + ''hadoop-conf-dir''=''/app/wwwroot/MBDC/hadoop/etc/hadoop/'' // --hadoop配置文件,配了环境变量则不需要。 +);', '1.14', 0, 1 + , '2022-01-20 17:18:54', '2022-01-20 17:18:54'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 14, 'Operator', 'CataLog', 'Other', 'use.catalog.hive' + , '使用hive的catalog', 'USE CATALOG hive;', '1.14', 0, 1 + , '2022-01-20 17:22:53', '2022-01-20 17:22:53'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 15, 'Operator', 'CataLog', 'Other', 'use.catalog.default' + , '使用default的catalog', 'USE CATALOG default_catalog;', '1.14', 0, 1 + , '2022-01-20 17:23:48', '2022-01-20 17:24:23'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 16, 'Variable', '设置参数', 'Other', 'set dialect.hive' + , '使用hive方言', 'Set table.sql-dialect=hive;', '1.14', 0, 1 + , '2022-01-20 17:25:37', '2022-01-20 17:27:23'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 17, 'Variable', '设置参数', 'Other', 'set dialect.default' + , '使用default方言', 'Set table.sql-dialect=default;', '1.14', 0, 1 + , '2022-01-20 17:26:19', '2022-01-20 17:27:20'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 18, 'Module', '建表语句', 'Other', 'create.stream.table.hive' + , '创建流式HIVE表', 'CREATE CATALOG hive WITH ( // --创建hive的catalog + ''type'' = ''hive'', + ''hive-conf-dir'' = ''/app/wwwroot/MBDC/hive/conf/'', + ''hadoop-conf-dir''=''/app/wwwroot/MBDC/hadoop/etc/hadoop/'' +); + +USE CATALOG hive; +USE offline_db; // --选择库 +set table.sql-dialect=hive; // --设置方言 + +CREATE TABLE hive_stream_table ( + user_id STRING, + order_amount DOUBLE +) PARTITIONED BY (dt STRING, hr STRING) STORED AS parquet TBLPROPERTIES ( + ''partition.time-extractor.timestamp-pattern''=''$dt $hr:00:00'', + ''sink.partition-commit.trigger''=''partition-time'', + ''sink.partition-commit.delay''=''1min'', + ''sink.semantic'' = ''exactly-once'', + ''sink.rolling-policy.rollover-interval'' =''1min'', + ''sink.rolling-policy.check-interval''=''1min'', + 
''sink.partition-commit.policy.kind''=''metastore,success-file'' +);', '1.14', 0, 1 + , '2022-01-20 17:34:06', '2022-01-20 17:46:41'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 19, 'Module', '建表语句', 'Other', 'create.table.mysql_cdc' + , '创建Mysql_CDC表', 'CREATE TABLE mysql_cdc_table( + cid INT, + sid INT, + cls STRING, + score INT, + PRIMARY KEY (cid) NOT ENFORCED +) WITH ( +''connector'' = ''mysql-cdc'', +''hostname'' = ''127.0.0.1'', +''port'' = ''3306'', +''username'' = ''test'', +''password'' = ''123456'', +''database-name'' = ''test'', +''server-time-zone'' = ''UTC'', +''scan.incremental.snapshot.enabled'' = ''true'', +''debezium.snapshot.mode''=''latest-offset'' ,// -- 或者key是scan.startup.mode,initial表示要历史数据,latest-offset表示不要历史数据 +''debezium.datetime.format.date''=''yyyy-MM-dd'', +''debezium.datetime.format.time''=''HH-mm-ss'', +''debezium.datetime.format.datetime''=''yyyy-MM-dd HH-mm-ss'', +''debezium.datetime.format.timestamp''=''yyyy-MM-dd HH-mm-ss'', +''debezium.datetime.format.timestamp.zone''=''UTC+8'', +''table-name'' = ''mysql_cdc_table'');', '1.14', 0, 1 + , '2022-01-20 17:49:14', '2022-01-20 17:52:20'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 20, 'Module', '建表语句', 'Other', 'create.table.hudi' + , '创建hudi表', 'CREATE TABLE hudi_table +( + `goods_order_id` bigint COMMENT ''自增主键id'', + `goods_order_uid` string COMMENT ''订单uid'', + `customer_uid` string COMMENT ''客户uid'', + `customer_name` string COMMENT ''客户name'', + `create_time` timestamp(3) COMMENT ''创建时间'', + `update_time` timestamp(3) COMMENT ''更新时间'', + `create_by` string COMMENT ''创建人uid(唯一标识)'', + `update_by` string COMMENT ''更新人uid(唯一标识)'', + PRIMARY KEY (goods_order_id) NOT ENFORCED +) COMMENT ''hudi_table'' +WITH ( +''connector'' = ''hudi'', +''path'' = ''hdfs://cluster1/data/bizdata/cdc/mysql/order/goods_order'', // -- 路径会自动创建 +''hoodie.datasource.write.recordkey.field'' = ''goods_order_id'', // -- 主键 +''write.precombine.field'' = ''update_time'', // -- 相同的键值时,取此字段最大值,默认ts字段 +''read.streaming.skip_compaction'' = ''true'', // -- 避免重复消费问题 +''write.bucket_assign.tasks'' = ''2'', // -- 并发写的 bucekt 数 +''write.tasks'' = ''2'', +''compaction.tasks'' = ''1'', +''write.operation'' = ''upsert'', // -- UPSERT(插入更新)\\INSERT(插入)\\BULK_INSERT(批插入)(upsert性能会低些,不适合埋点上报) +''write.rate.limit'' = ''20000'', // -- 限制每秒多少条 +''table.type'' = ''COPY_ON_WRITE'', // -- 默认COPY_ON_WRITE , +''compaction.async.enabled'' = ''true'', // -- 在线压缩 +''compaction.trigger.strategy'' = ''num_or_time'', // -- 按次数压缩 +''compaction.delta_commits'' = ''20'', // -- 默认为5 +''compaction.delta_seconds'' = ''60'', // -- 默认为1小时 +''hive_sync.enable'' = ''true'', // -- 启用hive同步 +''hive_sync.mode'' = ''hms'', // -- 启用hive hms同步,默认jdbc +''hive_sync.metastore.uris'' = ''thrift://cdh2.vision.com:9083'', // -- required, metastore的端口 +''hive_sync.jdbc_url'' = ''jdbc:hive2://cdh1.vision.com:10000'', // -- required, hiveServer地址 +''hive_sync.table'' = ''order_mysql_goods_order'', // -- required, hive 新建的表名 会自动同步hudi的表结构和数据到hive +''hive_sync.db'' = ''cdc_ods'', // -- required, hive 新建的数据库名 +''hive_sync.username'' = ''hive'', // -- required, HMS 用户名 +''hive_sync.password'' = ''123456'', // -- required, HMS 密码 +''hive_sync.skip_ro_suffix'' = ''true'' // -- 去除ro后缀 +);', '1.14', 0, 1 + 
, '2022-01-20 17:56:50', '2022-01-20 17:56:50'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 21, 'Function', '内置函数', '比较函数', 'value1 <> value2' + , '如果value1不等于value2 返回true; 如果value1或value2为NULL,则返回UNKNOWN 。', '${1:} <> ${2:}', '1.12', 4, 1 + , '2021-02-22 10:05:38', '2021-03-11 09:58:48'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 22, 'Function', '内置函数', '比较函数', 'value1 > value2' + , '如果value1大于value2 返回true; 如果value1或value2为NULL,则返回UNKNOWN 。', '${1:} > ${2:}', '1.12', 2, 1 + , '2021-02-22 14:37:58', '2021-03-10 11:58:06'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 23, 'Function', '内置函数', '比较函数', 'value1 >= value2' + , '如果value1大于或等于value2 返回true; 如果value1或value2为NULL,则返回UNKNOWN 。', '${1:} >= ${2:}', '1.12', 2, 1 + , '2021-02-22 14:38:52', '2022-03-29 19:05:54'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 24, 'Function', '内置函数', '比较函数', 'value1 < value2' + , '如果value1小于value2 返回true; 如果value1或value2为NULL,则返回UNKNOWN 。', '${1:} < ${2:}', '1.12', 0, 1 + , '2021-02-22 14:39:15', '2022-03-29 19:04:58'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 25, 'Function', '内置函数', '比较函数', 'value1 <= value2' + , '如果value1小于或等于value2 返回true; 如果value1或value2为NULL,则返回UNKNOWN 。', '${1:} <= ${2:}', '1.12', 0 + , 1 + , '2021-02-22 14:39:40', '2022-03-29 19:05:17'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 26, 'Function', '内置函数', '比较函数', 'value IS NULL' + , '如果value为NULL,则返回TRUE 。', '${1:} IS NULL', '1.12', 2, 1 + , '2021-02-22 14:40:39', '2021-03-10 11:57:51'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 27, 'Function', '内置函数', '比较函数', 'value IS NOT NULL' + , '如果value不为NULL,则返回TRUE 。', '${1:} IS NOT NULL', '1.12', 0, 1 + , '2021-02-22 14:41:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 28, 'Function', '内置函数', '比较函数', 'value1 IS DISTINCT FROM value2' + , '如果两个值不相等则返回TRUE。NULL值在这里被视为相同的值。', '${1:} IS DISTINCT FROM ${2:}', '1.12', 0, 1 + , '2021-02-22 14:42:39', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 29, 'Function', '内置函数', '比较函数', 'value1 IS NOT DISTINCT FROM value2' + , '如果两个值相等则返回TRUE。NULL值在这里被视为相同的值。', '${1:} IS NOT DISTINCT FROM ${2:}', '1.12', 0, 1 + , '2021-02-22 14:43:23', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( 
`id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 30, 'Function', '内置函数', '比较函数', 'value1 BETWEEN [ ASYMMETRIC | SYMMETRIC ] value2 AND value3' + , '如果value1大于或等于value2和小于或等于value3 返回true', '${1:} BETWEEN ${2:} AND ${3:}', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 31, 'Function', '内置函数', '比较函数', 'value1 NOT BETWEEN [ ASYMMETRIC | SYMMETRIC ] value2 AND value3' + , '如果value1小于value2或大于value3 返回true', '${1:} NOT BETWEEN ${2:} AND ${3:}', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 32, 'Function', '内置函数', '比较函数', 'string1 LIKE string2 [ ESCAPE char ]' + , '如果STRING1匹配模式STRING2,则返回TRUE ;如果STRING1或STRING2为NULL,则返回UNKNOWN 。', '${1:} LIKE ${2:}', '1.12' + , 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 33, 'Function', '内置函数', '比较函数', 'string1 NOT LIKE string2 [ ESCAPE char ]' + , '如果STRING1不匹配模式STRING2,则返回TRUE ;如果STRING1或STRING2为NULL,则返回UNKNOWN 。', '${1:} NOT LIKE ${2:}' + , '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 34, 'Function', '内置函数', '比较函数', 'string1 SIMILAR TO string2 [ ESCAPE char ]' + , '如果STRING1与SQL正则表达式STRING2匹配,则返回TRUE ;如果STRING1或STRING2为NULL,则返回UNKNOWN 。' + , '${1:} SIMILAR TO ${2:}', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-03-10 11:57:28'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 35, 'Function', '内置函数', '比较函数', 'string1 NOT SIMILAR TO string2 [ ESCAPE char ]' + , '如果STRING1与SQL正则表达式STRING2不匹配,则返回TRUE ;如果STRING1或STRING2为NULL,则返回UNKNOWN 。' + , '${1:} NOT SIMILAR TO ${2:}', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 36, 'Function', '内置函数', '比较函数', 'value1 IN (value2 [, value3]* )' + , '如果value1存在于给定列表(value2,value3,...)中,则返回TRUE 。 + +当(value2,value3,...)包含NULL,如果可以找到该元素,则返回TRUE,否则返回UNKNOWN。 + +如果value1为NULL,则始终返回UNKNOWN 。', '${1:} IN (${2:} )', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 37, 'Function', '内置函数', '比较函数', 'value1 NOT IN (value2 [, value3]* )' + , '如果value1不存在于给定列表(value2,value3,...)中,则返回TRUE 。 + +当(value2,value3,...)包含NULL,如果可以找到该元素,则返回TRUE,否则返回UNKNOWN。 + +如果value1为NULL,则始终返回UNKNOWN 。', '${1:} NOT IN (${2:})', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 
15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 38, 'Function', '内置函数', '比较函数', 'EXISTS (sub-query)' + , '如果value存在于子查询中,则返回TRUE。', 'EXISTS (${1:})', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 39, 'Function', '内置函数', '比较函数', 'value IN (sub-query)' + , '如果value存在于子查询中,则返回TRUE。', '${1:} IN (${2:})', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 40, 'Function', '内置函数', '比较函数', 'value NOT IN (sub-query)' + , '如果value不存在于子查询中,则返回TRUE。', '${1:} NOT IN (${2:})', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 41, 'Function', '内置函数', '逻辑函数', 'boolean1 OR boolean2' + , '如果BOOLEAN1为TRUE或BOOLEAN2为TRUE,则返回TRUE。支持三值逻辑。 + +例如,true || Null(Types.BOOLEAN)返回TRUE。', '${1:} OR ${2:}', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 42, 'Function', '内置函数', '逻辑函数', 'boolean1 AND boolean2' + , '如果BOOLEAN1和BOOLEAN2均为TRUE,则返回TRUE。支持三值逻辑。 + +例如,true && Null(Types.BOOLEAN)返回未知。', '${1:} AND ${2:}', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 43, 'Function', '内置函数', '逻辑函数', 'NOT boolean' + , '如果BOOLEAN为FALSE,则返回TRUE ;如果BOOLEAN为TRUE,则返回FALSE 。 + +如果BOOLEAN为UNKNOWN,则返回UNKNOWN。', 'NOT ${1:} ', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 44, 'Function', '内置函数', '逻辑函数', 'boolean IS FALSE' + , '如果BOOLEAN为FALSE,则返回TRUE ;如果BOOLEAN为TRUE或UNKNOWN,则返回FALSE 。', '${1:} IS FALSE', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 45, 'Function', '内置函数', '逻辑函数', 'boolean IS NOT FALSE' + , '如果BOOLEAN为TRUE或UNKNOWN,则返回TRUE ;如果BOOLEAN为FALSE,则返回FALSE。', '${1:} IS NOT FALSE', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 46, 'Function', '内置函数', '逻辑函数', 'boolean IS TRUE' + , '如果BOOLEAN为TRUE,则返回TRUE;如果BOOLEAN为FALSE或UNKNOWN,则返回FALSE 。', '${1:} IS TRUE', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO 
`dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 47, 'Function', '内置函数', '逻辑函数', 'boolean IS NOT TRUE' + , '如果BOOLEAN为FALSE或UNKNOWN,则返回TRUE ;如果BOOLEAN为TRUE,则返回FALSE 。', '${1:} IS NOT TRUE', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 48, 'Function', '内置函数', '逻辑函数', 'boolean IS UNKNOWN' + , '如果BOOLEAN为UNKNOWN,则返回TRUE ;如果BOOLEAN为TRUE或FALSE,则返回FALSE 。', '${1:} IS UNKNOWN', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 49, 'Function', '内置函数', '逻辑函数', 'boolean IS NOT UNKNOWN' + , '如果BOOLEAN为TRUE或FALSE,则返回TRUE ;如果BOOLEAN为UNKNOWN,则返回FALSE 。', '${1:} IS NOT UNKNOWN', '1.12', 0 + , 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 50, 'Function', '内置函数', '算术函数', '+ numeric' + , '返回NUMERIC。', '+ ${1:} ', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 51, 'Function', '内置函数', '算术函数', '- numeric' + , '返回负数NUMERIC。', '- ${1:} ', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 52, 'Function', '内置函数', '算术函数', 'numeric1 + numeric2' + , '返回NUMERIC1加NUMERIC2。', '${1:} + ${2:} ', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 53, 'Function', '内置函数', '算术函数', 'numeric1 - numeric2' + , '返回NUMERIC1减去NUMERIC2。', '${1:} - ${2:} ', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 54, 'Function', '内置函数', '算术函数', 'numeric1 * numeric2' + , '返回NUMERIC1乘以NUMERIC2。', '${1:} * ${2:} ', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 55, 'Function', '内置函数', '算术函数', 'numeric1 / numeric2' + , '返回NUMERIC1除以NUMERIC2。', '${1:} / ${2:} ', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 56, 'Function', '内置函数', '算术函数', 'numeric1 % numeric2' + , 
'返回NUMERIC1除以NUMERIC2的余数(模)。仅当numeric1为负数时,结果为负数。', '${1:} % ${2:} ', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 57, 'Function', '内置函数', '算术函数', 'POWER(numeric1, numeric2)' + , '返回NUMERIC1的NUMERIC2 次幂。', 'POWER(${1:} , ${2:})', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 58, 'Function', '内置函数', '算术函数', 'ABS(numeric)' + , '返回NUMERIC的绝对值。', 'ABS(${1:})', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 59, 'Function', '内置函数', '算术函数', 'MOD(numeric1, numeric2)' + , '返回numeric1除以numeric2的余数(模)。只有当numeric1为负数时,结果才为负数', 'MOD(${1:} , ${2:} )', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 60, 'Function', '内置函数', '算术函数', 'SQRT(numeric)' + , '返回NUMERIC的平方根。', 'SQRT(${1:})', '1.12', 0, 1 + , '2021-02-22 14:44:26', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 61, 'Function', '内置函数', '算术函数', 'LN(numeric)' + , '返回NUMERIC的自然对数(以e为底)。', 'LN(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 62, 'Function', '内置函数', '算术函数', 'LOG10(numeric)' + , '返回NUMERIC的以10为底的对数。', 'LOG10(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 63, 'Function', '内置函数', '算术函数', 'LOG2(numeric)' + , '返回NUMERIC的以2为底的对数。', 'LOG2(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 64, 'Function', '内置函数', '算术函数', 'EXP(numeric)' + , '返回e 的 NUMERIC 次幂。', 'EXP(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 65, 'Function', '内置函数', '算术函数', 'FLOOR(numeric)' + , '向下舍入NUMERIC,并返回小于或等于NUMERIC的最大整数。', 'FLOOR(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 66, 'Function', '内置函数', '算术函数', 
'SIN(numeric)' + , '返回NUMERIC的正弦值。', 'SIN(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 67, 'Function', '内置函数', '算术函数', 'SINH(numeric)' + , '返回NUMERIC的双曲正弦值。 + +返回类型为DOUBLE。', 'SINH(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 68, 'Function', '内置函数', '算术函数', 'COS(numeric)' + , '返回NUMERIC的余弦值。', 'COS(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 69, 'Function', '内置函数', '算术函数', 'TAN(numeric)' + , '返回NUMERIC的正切。', 'TAN(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 70, 'Function', '内置函数', '算术函数', 'TANH(numeric)' + , '返回NUMERIC的双曲正切值。 + +返回类型为DOUBLE。', 'TANH(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 71, 'Function', '内置函数', '算术函数', 'COT(numeric)' + , '返回NUMERIC的余切。', 'COT(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 72, 'Function', '内置函数', '算术函数', 'ASIN(numeric)' + , '返回NUMERIC的反正弦值。', 'ASIN(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 73, 'Function', '内置函数', '算术函数', 'ACOS(numeric)' + , '返回NUMERIC的反余弦值。', 'ACOS(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 74, 'Function', '内置函数', '算术函数', 'ATAN(numeric)' + , '返回NUMERIC的反正切。', 'ATAN(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 75, 'Function', '内置函数', '算术函数', 'ATAN2(numeric1, numeric2)' + , '返回坐标的反正切(NUMERIC1,NUMERIC2)。', 'ATAN2(${1:}, ${2:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 76, 'Function', '内置函数', '算术函数', 'COSH(numeric)' + , '返回NUMERIC的双曲余弦值。 + +返回值类型为DOUBLE。', 'COSH(${1:})', '1.12', 0, 
1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 77, 'Function', '内置函数', '算术函数', 'DEGREES(numeric)' + , '返回弧度NUMERIC的度数表示形式', 'DEGREES(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 78, 'Function', '内置函数', '算术函数', 'RADIANS(numeric)' + , '返回度数NUMERIC的弧度表示。', 'RADIANS(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 79, 'Function', '内置函数', '算术函数', 'SIGN(numeric)' + , '返回NUMERIC的符号。', 'SIGN(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 80, 'Function', '内置函数', '算术函数', 'ROUND(numeric, integer)' + , '返回一个数字,四舍五入为NUMERIC的INT小数位。', 'ROUND(${1:} , ${2:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 81, 'Function', '内置函数', '算术函数', 'PI' + , '返回一个比任何其他值都更接近圆周率的值。', 'PI', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 82, 'Function', '内置函数', '算术函数', 'E()' + , '返回一个比任何其他值都更接近e的值。', 'E()', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 83, 'Function', '内置函数', '算术函数', 'RAND()' + , '返回介于0.0(含)和1.0(不含)之间的伪随机双精度值。', 'RAND()', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 84, 'Function', '内置函数', '算术函数', 'RAND(integer)' + , '返回带有初始种子INTEGER的介于0.0(含)和1.0(不含)之间的伪随机双精度值。 + +如果两个RAND函数具有相同的初始种子,它们将返回相同的数字序列。', 'RAND(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 85, 'Function', '内置函数', '算术函数', 'RAND_INTEGER(integer)' + , '返回介于0(含)和INTEGER(不含)之间的伪随机整数值。', 'RAND_INTEGER(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 86, 'Function', '内置函数', '算术函数', 'RAND_INTEGER(integer1, integer2)' + , 
'返回介于0(含)和INTEGER2(不含)之间的伪随机整数值,其初始种子为INTEGER1。 + +如果两个randInteger函数具有相同的初始种子和边界,它们将返回相同的数字序列。', 'RAND_INTEGER(${1:} , ${2:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 87, 'Function', '内置函数', '算术函数', 'UUID()' + , '根据RFC 4122 type 4(伪随机生成)UUID返回UUID(通用唯一标识符)字符串 + +(例如,“ 3d3c68f7-f608-473f-b60c-b0c44ad4cc4e”)。使用加密强度高的伪随机数生成器生成UUID。', 'UUID()', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 88, 'Function', '内置函数', '算术函数', 'BIN(integer)' + , '以二进制格式返回INTEGER的字符串表示形式。如果INTEGER为NULL,则返回NULL。 + +例如,4.bin()返回“ 100”并12.bin()返回“ 1100”。', 'BIN(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 89, 'Function', '内置函数', '算术函数', 'HEX(numeric) +HEX(string)' + , '以十六进制格式返回整数NUMERIC值或STRING的字符串表示形式。如果参数为NULL,则返回NULL。 + +例如,数字20导致“ 14”,数字100导致“ 64”,字符串“ hello,world”导致“ 68656C6C6F2C776F726C64”。', 'HEX(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 90, 'Function', '内置函数', '算术函数', 'TRUNCATE(numeric1, integer2)' + , '返回一个小数点后被截断为integer2位的数字。', 'TRUNCATE(${1:}, ${2:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 91, 'Function', '内置函数', '算术函数', 'PI()' + , '返回π (pi)的值。仅在blink planner中支持。', 'PI()', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 92, 'Function', '内置函数', '算术函数', 'LOG(numeric1)' + , '如果不带参数调用,则返回NUMERIC1的自然对数。当使用参数调用时,将NUMERIC1的对数返回到基数NUMERIC2。 + +注意:当前,NUMERIC1必须大于0,而NUMERIC2必须大于1。', 'LOG(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 93, 'Function', '内置函数', '算术函数', 'LOG(numeric1, numeric2)' + , '如果不带参数调用,则返回NUMERIC1的自然对数。当使用参数调用时,将NUMERIC1的对数返回到基数NUMERIC2。 + +注意:当前,NUMERIC1必须大于0,而NUMERIC2必须大于1。', 'LOG(${1:}, ${2:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 94, 'Function', '内置函数', '算术函数', 'CEIL(numeric)' + , '将NUMERIC向上舍入,并返回大于或等于NUMERIC的最小整数。', 'CEIL(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, 
`fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 95, 'Function', '内置函数', '算术函数', 'CEILING(numeric)' + , '将NUMERIC向上舍入,并返回大于或等于NUMERIC的最小整数。', 'CEILING(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 96, 'Function', '内置函数', '字符串函数', 'string1 || string2' + , '返回string1和string2的连接。', '${1:} || ${2:}', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 97, 'Function', '内置函数', '字符串函数', 'UPPER(string)' + , '以大写形式返回STRING。', 'UPPER(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 98, 'Function', '内置函数', '字符串函数', 'LOWER(string)' + , '以小写形式返回STRING。', 'LOWER(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 99, 'Function', '内置函数', '字符串函数', 'POSITION(string1 IN string2)' + , '返回STRING1在STRING2中第一次出现的位置(从1开始); + +如果在STRING2中找不到STRING1,则返回0 。', 'POSITION(${1:} IN ${2:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 100, 'Function', '内置函数', '字符串函数', 'TRIM([ BOTH | LEADING | TRAILING ] string1 FROM string2)' + , '返回一个字符串,该字符串从STRING中删除前导和/或结尾字符。', 'TRIM(${1:} FROM ${2:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 101, 'Function', '内置函数', '字符串函数', 'LTRIM(string)' + , '返回一个字符串,该字符串从STRING除去左空格。 + +例如," This is a test String.".ltrim()返回“This is a test String.”。', 'LTRIM(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 102, 'Function', '内置函数', '字符串函数', 'RTRIM(string)' + , '返回一个字符串,该字符串从STRING中删除正确的空格。 + +例如,"This is a test String. 
".rtrim()返回“This is a test String.”。', 'RTRIM(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 103, 'Function', '内置函数', '字符串函数', 'REPEAT(string, integer)' + , '返回一个字符串,该字符串重复基本STRING INT次。 + +例如,"This is a test String.".repeat(2)返回“This is a test String.This is a test String.”。', 'REPEAT(${1:}, ${2:})' + , '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 104, 'Function', '内置函数', '字符串函数', 'REGEXP_REPLACE(string1, string2, string3)' + , '返回字符串STRING1所有匹配正则表达式的子串STRING2连续被替换STRING3。 + +例如,"foobar".regexpReplace("oo|ar", "")返回“ fb”。', 'REGEXP_REPLACE(${1:} , ${2:} , ${3:} )', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 105, 'Function', '内置函数', '字符串函数', 'OVERLAY(string1 PLACING string2 FROM integer1 [ FOR integer2 ])' + , '从位置INT1返回一个字符串,该字符串将STRING1的INT2(默认为STRING2的长度)字符替换为STRING2' + , 'OVERLAY(${1:} PLACING ${2:} FROM ${3:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 106, 'Function', '内置函数', '字符串函数', 'SUBSTRING(string FROM integer1 [ FOR integer2 ])' + , '返回字符串STRING的子字符串,从位置INT1开始,长度为INT2(默认为结尾)。', 'SUBSTRING${1:} FROM ${2:} )', '1.12', 0 + , 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 107, 'Function', '内置函数', '字符串函数', 'REPLACE(string1, string2, string3)' + , '返回一个新字符串替换其中出现的所有STRING2与STRING3(非重叠)从STRING1。', 'REPLACE(${1:} , ${2:} , ${3:} )' + , '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 108, 'Function', '内置函数', '字符串函数', 'REGEXP_EXTRACT(string1, string2[, integer])' + , '从STRING1返回一个字符串,该字符串使用指定的正则表达式STRING2和正则表达式匹配组索引INTEGER1提取。' + , 'REGEXP_EXTRACT(${1:}, ${2:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 109, 'Function', '内置函数', '字符串函数', 'INITCAP(string)' + , '返回一种新形式的STRING,其中每个单词的第一个字符转换为大写,其余字符转换为小写。', 'INITCAP(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 110, 'Function', '内置函数', '字符串函数', 'CONCAT(string1, string2,...)' + , '返回连接STRING1,STRING2,...的字符串。如果任何参数为NULL,则返回NULL。', 'CONCAT(${1:} , ${2:} , ${3:} )', '1.12' + , 0, 
1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 111, 'Function', '内置函数', '字符串函数', 'CONCAT_WS(string1, string2, string3,...)' + , '返回一个字符串,会连接STRING2,STRING3,......与分离STRING1。', 'CONCAT_WS(${1:} , ${2:} , ${3:} )', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 112, 'Function', '内置函数', '字符串函数', 'LPAD(string1, integer, string2)' + , '返回一个新字符串,该字符串从STRING1的左侧填充STRING2,长度为INT个字符。', 'LPAD(${1:} , ${2:} , ${3:} )', '1.12' + , 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 113, 'Function', '内置函数', '字符串函数', 'RPAD(string1, integer, string2)' + , '返回一个新字符串,该字符串从STRING1右侧填充STRING2,长度为INT个字符。', 'RPAD(${1:} , ${2:} , ${3:} )', '1.12', 0 + , 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 114, 'Function', '内置函数', '字符串函数', 'FROM_BASE64(string)' + , '返回来自STRING的base64解码结果;如果STRING为NULL,则返回null 。', 'FROM_BASE64(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 115, 'Function', '内置函数', '字符串函数', 'TO_BASE64(string)' + , '从STRING返回base64编码的结果;如果STRING为NULL,则返回NULL。', 'TO_BASE64(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 116, 'Function', '内置函数', '字符串函数', 'ASCII(string)' + , '返回字符串的第一个字符的数值。如果字符串为NULL,则返回NULL。仅在blink planner中支持。', 'ASCII(${1:})', '1.12', 0 + , 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 117, 'Function', '内置函数', '字符串函数', 'CHR(integer)' + , '返回与integer在二进制上等价的ASCII字符。如果integer大于255,我们将首先得到integer的模数除以255,并返回模数的CHR。如果integer为NULL,则返回NULL。仅在blink planner中支持。' + , 'CHR(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 118, 'Function', '内置函数', '字符串函数', 'DECODE(binary, string)' + , '使用提供的字符集(''US-ASCII'', ''ISO-8859-1'', ''UTF-8'', ''UTF-16BE'', ''UTF-16LE'', ''UTF-16''之一)将第一个参数解码为字符串。如果任意一个参数为空,结果也将为空。仅在blink planner中支持。' + , 'DECODE(${1:}, ${2:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, 
`update_time`) +VALUES ( 119, 'Function', '内置函数', '字符串函数', 'ENCODE(string1, string2)' + , '使用提供的string2字符集(''US-ASCII'', ''ISO-8859-1'', ''UTF-8'', ''UTF-16BE'', ''UTF-16LE'', ''UTF-16''之一)将string1编码为二进制。如果任意一个参数为空,结果也将为空。仅在blink planner中支持。' + , 'ENCODE(${1:}, ${2:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 120, 'Function', '内置函数', '字符串函数', 'INSTR(string1, string2)' + , '返回string2在string1中第一次出现的位置。如果任何参数为空,则返回NULL。仅在blink planner中支持。' + , 'INSTR(${1:}, ${2:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 121, 'Function', '内置函数', '字符串函数', 'LEFT(string, integer)' + , '返回字符串中最左边的整数字符。如果整数为负,则返回空字符串。如果任何参数为NULL,则返回NULL。仅在blink planner中支持。' + , 'LEFT(${1:}, ${2:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 122, 'Function', '内置函数', '字符串函数', 'RIGHT(string, integer)' + , '返回字符串中最右边的整数字符。如果整数为负,则返回空字符串。如果任何参数为NULL,则返回NULL。仅在blink planner中支持。' + , 'RIGHT(${1:}, ${2:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 123, 'Function', '内置函数', '字符串函数', 'LOCATE(string1, string2[, integer])' + , '返回string1在string2中的位置整数之后第一次出现的位置。如果没有找到,返回0。如果任何参数为NULL,则返回NULL仅在blink planner中支持。' + , 'LOCATE(${1:}, ${2:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 124, 'Function', '内置函数', '字符串函数', 'PARSE_URL(string1, string2[, string3])' + , '从URL返回指定的部分。string2的有效值包括''HOST'', ''PATH'', ''QUERY'', ''REF'', ''PROTOCOL'', ''AUTHORITY'', ''FILE''和''USERINFO''。如果任何参数为NULL,则返回NULL。仅在blink planner中支持。' + , 'PARSE_URL(${1:} , ${2:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 125, 'Function', '内置函数', '字符串函数', 'REGEXP(string1, string2)' + , '如果string1的任何子字符串(可能为空)与Java正则表达式string2匹配,则返回TRUE,否则返回FALSE。如果任何参数为NULL,则返回NULL。仅在blink planner中支持。' + , 'REGEXP(${1:}, ${2:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 126, 'Function', '内置函数', '字符串函数', 'REVERSE(string)' + , '返回反向字符串。如果字符串为NULL,则返回NULL仅在blink planner中支持。', 'REVERSE(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, 
`update_time`) +VALUES ( 127, 'Function', '内置函数', '字符串函数', 'SPLIT_INDEX(string1, string2, integer1)' + , '通过分隔符string2拆分string1,返回拆分字符串的整数(从零开始)字符串。如果整数为负,返回NULL。如果任何参数为NULL,则返回NULL。仅在blink planner中支持。' + , 'SPLIT_INDEX(${1:}, ${2:} , ${3:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 128, 'Function', '内置函数', '字符串函数', 'STR_TO_MAP(string1[, string2, string3]])' + , '使用分隔符将string1分割成键/值对后返回一个映射。string2是pair分隔符,默认为'',''。string3是键值分隔符,默认为''=''。仅在blink planner中支持。' + , 'STR_TO_MAP(${1:})', '1.12', 4, 1 + , '2021-02-22 15:29:35', '2021-05-20 19:59:50'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 129, 'Function', '内置函数', '字符串函数', 'SUBSTR(string[, integer1[, integer2]])' + , '返回一个字符串的子字符串,从位置integer1开始,长度为integer2(默认到末尾)。仅在blink planner中支持。' + , 'SUBSTR(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 130, 'Function', '内置函数', '字符串函数', 'CHAR_LENGTH(string)' + , '返回STRING中的字符数。', 'CHAR_LENGTH(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 131, 'Function', '内置函数', '字符串函数', 'CHARACTER_LENGTH(string)' + , '返回STRING中的字符数。', 'CHARACTER_LENGTH(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 132, 'Function', '内置函数', '时间函数', 'DATE string' + , '返回以“ yyyy-MM-dd”形式从STRING解析的SQL日期。', 'DATE(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 133, 'Function', '内置函数', '时间函数', 'TIME string' + , '返回以“ HH:mm:ss”的形式从STRING解析的SQL时间。', 'TIME(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 134, 'Function', '内置函数', '时间函数', 'TIMESTAMP string' + , '返回从STRING解析的SQL时间戳,格式为“ yyyy-MM-dd HH:mm:ss [.SSS]”', 'TIMESTAMP(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 135, 'Function', '内置函数', '时间函数', 'INTERVAL string range' + , '解析“dd hh:mm:ss”形式的区间字符串。fff表示毫秒间隔,yyyy-mm表示月间隔。间隔范围可以是天、分钟、天到小时或天到秒,以毫秒为间隔;年或年到月的间隔。' + , 'INTERVAL ${1:} range', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, 
`fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 136, 'Function', '内置函数', '时间函数', 'CURRENT_DATE' + , '返回UTC时区中的当前SQL日期。', 'CURRENT_DATE', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 137, 'Function', '内置函数', '时间函数', 'CURRENT_TIME' + , '返回UTC时区的当前SQL时间。', 'CURRENT_TIME', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 138, 'Function', '内置函数', '时间函数', 'CURRENT_TIMESTAMP' + , '返回UTC时区内的当前SQL时间戳。', 'CURRENT_TIMESTAMP', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 139, 'Function', '内置函数', '时间函数', 'LOCALTIME' + , '返回本地时区的当前SQL时间。', 'LOCALTIME', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 140, 'Function', '内置函数', '时间函数', 'LOCALTIMESTAMP' + , '返回本地时区的当前SQL时间戳。', 'LOCALTIMESTAMP', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 141, 'Function', '内置函数', '时间函数', 'EXTRACT(timeintervalunit FROM temporal)' + , '返回从时域的timeintervalunit部分提取的长值。', 'EXTRACT(${1:} FROM ${2:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 142, 'Function', '内置函数', '时间函数', 'YEAR(date)' + , '返回SQL date日期的年份。等价于EXTRACT(YEAR FROM date)。', 'YEAR(${1:})', '1.12', 0, 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 143, 'Function', '内置函数', '时间函数', 'QUARTER(date)' + , '从SQL date date返回一年中的季度(1到4之间的整数)。相当于EXTRACT(从日期起四分之一)。', 'QUARTER(${1:})', '1.12', 0 + , 1 + , '2021-02-22 15:29:35', '2021-02-22 15:28:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 144, 'Function', '内置函数', '时间函数', 'MONTH(date)' + , '返回SQL date date中的某月(1到12之间的整数)。等价于EXTRACT(MONTH FROM date)。', 'MONTH(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 145, 'Function', '内置函数', '时间函数', 'WEEK(date)' + , '从SQL date date返回一年中的某个星期(1到53之间的整数)。相当于EXTRACT(从日期开始的星期)。', 'WEEK(${1:})', '1.12' + , 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); 
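As a brief illustration of how a few of the temporal functions catalogued in the entries above (YEAR, QUARTER, MONTH, EXTRACT) are typically combined in a Flink SQL query — a minimal sketch only; the table `orders` and its DATE column `order_date` are hypothetical placeholders and do not appear anywhere in this seed data:

SELECT
  YEAR(order_date)               AS order_year,    -- year component, equivalent to EXTRACT(YEAR FROM order_date)
  QUARTER(order_date)            AS order_quarter, -- quarter of the year, 1..4
  MONTH(order_date)              AS order_month,   -- month of the year, 1..12
  EXTRACT(WEEK FROM order_date)  AS order_week     -- week of the year, 1..53
FROM orders;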
+INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 146, 'Function', '内置函数', '时间函数', 'DAYOFYEAR(date)' + , '返回SQL date date中的某一天(1到366之间的整数)。相当于EXTRACT(DOY FROM date)。', 'DAYOFYEAR(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 147, 'Function', '内置函数', '时间函数', 'DAYOFMONTH(date)' + , '从SQL date date返回一个月的哪一天(1到31之间的整数)。相当于EXTRACT(DAY FROM date)。', 'DAYOFMONTH(${1:})', '1.12' + , 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 148, 'Function', '内置函数', '时间函数', 'DAYOFWEEK(date)' + , '返回星期几(1到7之间的整数;星期日= 1)从SQL日期日期。相当于提取(道指从日期)。', 'DAYOFWEEK(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 149, 'Function', '内置函数', '时间函数', 'HOUR(timestamp)' + , '从SQL timestamp timestamp返回一天中的小时(0到23之间的整数)。相当于EXTRACT(HOUR FROM timestamp)。', 'HOUR(${1:})' + , '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 150, 'Function', '内置函数', '时间函数', 'MINUTE(timestamp)' + , '从SQL timestamp timestamp返回一小时的分钟(0到59之间的整数)。相当于EXTRACT(分钟从时间戳)。', 'MINUTE(${1:})' + , '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 151, 'Function', '内置函数', '时间函数', 'SECOND(timestamp)' + , '从SQL时间戳返回一分钟中的秒(0到59之间的整数)。等价于EXTRACT(从时间戳开始倒数第二)。', 'SECOND(${1:})', '1.12', 0 + , 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 152, 'Function', '内置函数', '时间函数', 'FLOOR(timepoint TO timeintervalunit)' + , '返回一个将timepoint舍入到时间单位timeintervalunit的值。', 'FLOOR(${1:} TO ${2:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 153, 'Function', '内置函数', '时间函数', 'CEIL(timepoint TO timeintervalunit)' + , '返回一个将timepoint舍入到时间单位timeintervalunit的值。', 'CEIL(${1:} TO ${2:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 154, 'Function', '内置函数', '时间函数', '(timepoint1, temporal1) OVERLAPS (timepoint2, temporal2)' + , '如果(timepoint1, temporal1)和(timepoint2, temporal2)定义的两个时间间隔重叠,则返回TRUE。时间值可以是时间点或时间间隔。' + , 
'(${1:} , ${1:}) OVERLAPS (${2:} , ${2:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 155, 'Function', '内置函数', '时间函数', 'DATE_FORMAT(timestamp, string)' + , '注意这个功能有严重的错误,现在不应该使用。请实现一个自定义的UDF,或者使用EXTRACT作为解决方案。' + , 'DATE_FORMAT(${1:}, ''yyyy-MM-dd'')', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 156, 'Function', '内置函数', '时间函数', 'TIMESTAMPADD(timeintervalunit, interval, timepoint)' + , '返回一个新的时间值,该值将一个(带符号的)整数间隔添加到时间点。间隔的单位由unit参数给出,它应该是以下值之一:秒、分、小时、日、周、月、季度或年。' + , 'TIMESTAMPADD(${1:} , ${2:} , ${3:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 157, 'Function', '内置函数', '时间函数', 'TIMESTAMPDIFF(timepointunit, timepoint1, timepoint2)' + , '返回timepointunit在timepoint1和timepoint2之间的(带符号)数。间隔的单位由第一个参数给出,它应该是以下值之一:秒、分、小时、日、月或年。' + , 'TIMESTAMPDIFF(${1:} , ${2:} , ${3:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 158, 'Function', '内置函数', '时间函数', 'CONVERT_TZ(string1, string2, string3)' + , '将时区string2中的datetime string1(默认ISO时间戳格式''yyyy-MM-dd HH:mm:ss'')转换为时区string3。时区的格式可以是缩写,如“PST”;可以是全名,如“America/Los_Angeles”;或者是自定义ID,如“GMT-8:00”。仅在blink planner中支持。' + , 'CONVERT_TZ(${1:} , ${2:} , ${3:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 159, 'Function', '内置函数', '时间函数', 'FROM_UNIXTIME(numeric[, string])' + , '以字符串格式返回数值参数的表示形式(默认为''yyyy-MM-dd HH:mm:ss'')。numeric是一个内部时间戳值,表示从UTC ''1970-01-01 00:00:00''开始的秒数,例如UNIX_TIMESTAMP()函数生成的时间戳。返回值用会话时区表示(在TableConfig中指定)。仅在blink planner中支持。' + , 'FROM_UNIXTIME(${1:} )', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 160, 'Function', '内置函数', '时间函数', 'UNIX_TIMESTAMP()' + , '获取当前Unix时间戳(以秒为单位)。仅在blink planner中支持。', 'UNIX_TIMESTAMP()', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 161, 'Function', '内置函数', '时间函数', 'UNIX_TIMESTAMP(string1[, string2])' + , '转换日期时间字符串string1,格式为string2(缺省为yyyy-MM-dd HH:mm:ss,如果没有指定)为Unix时间戳(以秒为单位),使用表配置中指定的时区。仅在blink planner中支持。' + , 'UNIX_TIMESTAMP(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , 
`create_time`, `update_time`) +VALUES ( 162, 'Function', '内置函数', '时间函数', 'TO_DATE(string1[, string2])' + , '将格式为string2的日期字符串string1(默认为''yyyy-MM-dd'')转换为日期。仅在blink planner中支持。', 'TO_DATE(${1:})' + , '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 163, 'Function', '内置函数', '时间函数', 'TO_TIMESTAMP(string1[, string2])' + , '将会话时区(由TableConfig指定)下的日期时间字符串string1转换为时间戳,格式为string2(默认为''yyyy-MM-dd HH:mm:ss'')。仅在blink planner中支持。' + , 'TO_TIMESTAMP(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 164, 'Function', '内置函数', '时间函数', 'NOW()' + , '返回UTC时区内的当前SQL时间戳。仅在blink planner中支持。', 'NOW()', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 165, 'Function', '内置函数', '条件函数', 'CASE value +WHEN value1_1 [, value1_2 ]* THEN result1 +[ WHEN value2_1 [, value2_2 ]* THEN result2 ]* +[ ELSE resultZ ] +END' + , '当第一个时间值包含在(valueX_1, valueX_2,…)中时,返回resultX。如果没有匹配的值,则返回resultZ,否则返回NULL。', 'CASE ${1:} + WHEN ${2:} THEN ${3:} + ELSE ${4:} +END AS ${5:}', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 166, 'Function', '内置函数', '条件函数', 'CASE +WHEN condition1 THEN result1 +[ WHEN condition2 THEN result2 ]* +[ ELSE resultZ ] +END' + , '当第一个条件满足时返回resultX。当不满足任何条件时,如果提供了resultZ则返回resultZ,否则返回NULL。', 'CASE WHEN ${1:} THEN ${2:} + ELSE ${3:} +END AS ${4:}', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 167, 'Function', '内置函数', '条件函数', 'NULLIF(value1, value2)' + , '如果value1等于value2,则返回NULL;否则返回value1。', 'NULLIF(${1:}, ${2:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 168, 'Function', '内置函数', '条件函数', 'COALESCE(value1, value2 [, value3 ]* )' + , '返回value1, value2, ....中的第一个非空值', 'COALESCE(${1:} )', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 169, 'Function', '内置函数', '条件函数', 'IF(condition, true_value, false_value)' + , '如果条件满足则返回true值,否则返回false值。仅在blink planner中支持。', 'IF((${1:}, ${2:}, ${3:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 170, 'Function', '内置函数', '条件函数', 
'IS_ALPHA(string)' + , '如果字符串中所有字符都是字母则返回true,否则返回false。仅在blink planner中支持。', 'IS_ALPHA(${1:})', '1.12', 0 + , 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 171, 'Function', '内置函数', '条件函数', 'IS_DECIMAL(string)' + , '如果字符串可以被解析为有效的数字则返回true,否则返回false。仅在blink planner中支持。', 'IS_DECIMAL(${1:})' + , '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 172, 'Function', '内置函数', '条件函数', 'IS_DIGIT(string)' + , '如果字符串中所有字符都是数字则返回true,否则返回false。仅在blink planner中支持。', 'IS_DIGIT(${1:})', '1.12', 0 + , 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 173, 'Function', '内置函数', '类型转换函数功能', 'CAST(value AS type)' + , '返回一个要转换为type类型的新值。', 'CAST(${1:} AS ${2:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 174, 'Function', '内置函数', 'Collection 函数', 'CARDINALITY(array)' + , '返回数组中元素的数量。', 'CARDINALITY(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 175, 'Function', '内置函数', 'Collection 函数', 'array ‘[’ integer ‘]’' + , '返回数组中位于整数位置的元素。索引从1开始。', 'array[${1:}]', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 176, 'Function', '内置函数', 'Collection 函数', 'ELEMENT(array)' + , '返回数组的唯一元素(其基数应为1);如果数组为空,则返回NULL。如果数组有多个元素,则抛出异常。', 'ELEMENT(${1:})' + , '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 177, 'Function', '内置函数', 'Collection 函数', 'CARDINALITY(map)' + , '返回map中的条目数。', 'CARDINALITY(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 178, 'Function', '内置函数', 'Collection 函数', 'map ‘[’ value ‘]’' + , '返回map中key value指定的值。', 'map[${1:}]', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 179, 'Function', '内置函数', 'Value Construction函数', 'ARRAY ‘[’ value1 [, value2 ]* ‘]’' + , '返回一个由一系列值(value1, value2,…)创建的数组。', 'ARRAY[ ${1:} ]', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); 
+INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 180, 'Function', '内置函数', 'Value Construction函数', 'MAP ‘[’ value1, value2 [, value3, value4 ]* ‘]’' + , '返回一个从键值对列表((value1, value2), (value3, value4),…)创建的映射。', 'MAP[ ${1:} ]', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 181, 'Function', '内置函数', 'Value Construction函数', 'implicit constructor with parenthesis +(value1 [, value2]*)' + , '返回从值列表(value1, value2,…)创建的行。', '(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 182, 'Function', '内置函数', 'Value Construction函数', 'explicit ROW constructor +ROW(value1 [, value2]*)' + , '返回从值列表(value1, value2,…)创建的行。', 'ROW(${1:}) ', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 183, 'Function', '内置函数', 'Value Access函数', 'tableName.compositeType.field' + , '按名称从Flink复合类型(例如,Tuple, POJO)中返回一个字段的值。', 'tableName.compositeType.field', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 184, 'Function', '内置函数', 'Value Access函数', 'tableName.compositeType.*' + , '返回Flink复合类型(例如,Tuple, POJO)的平面表示,它将每个直接子类型转换为一个单独的字段。在大多数情况下,平面表示的字段的名称与原始字段类似,但使用了$分隔符(例如,mypojo$mytuple$f0)。' + , 'tableName.compositeType.*', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 185, 'Function', '内置函数', '分组函数', 'GROUP_ID()' + , '返回唯一标识分组键组合的整数', 'GROUP_ID()', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 186, 'Function', '内置函数', '分组函数', 'GROUPING(expression1 [, expression2]* ) +GROUPING_ID(expression1 [, expression2]* )' + , '返回给定分组表达式的位向量。', 'GROUPING(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 187, 'Function', '内置函数', 'hash函数', 'MD5(string)' + , '以32位十六进制数字的字符串形式返回string的MD5哈希值;如果字符串为NULL,则返回NULL。', 'MD5(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 188, 'Function', '内置函数', 'hash函数', 'SHA1(string)' + , 
'返回字符串的SHA-1散列,作为一个由40个十六进制数字组成的字符串;如果字符串为NULL,则返回NULL', 'SHA1(${1:})' + , '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 189, 'Function', '内置函数', 'hash函数', 'SHA224(string)' + , '以56位十六进制数字的字符串形式返回字符串的SHA-224散列;如果字符串为NULL,则返回NULL。', 'SHA224(${1:})', '1.12' + , 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 190, 'Function', '内置函数', 'hash函数', 'SHA256(string)' + , '以64位十六进制数字的字符串形式返回字符串的SHA-256散列;如果字符串为NULL,则返回NULL。', 'SHA256(${1:})', '1.12' + , 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 191, 'Function', '内置函数', 'hash函数', 'SHA384(string)' + , '以96个十六进制数字的字符串形式返回string的SHA-384散列;如果字符串为NULL,则返回NULL。', 'SHA384(${1:})', '1.12' + , 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 192, 'Function', '内置函数', 'hash函数', 'SHA512(string)' + , '以128位十六进制数字的字符串形式返回字符串的SHA-512散列;如果字符串为NULL,则返回NULL。', 'SHA512(${1:})', '1.12' + , 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 193, 'Function', '内置函数', 'hash函数', 'SHA2(string, hashLength)' + , '使用SHA-2哈希函数族(SHA-224、SHA-256、SHA-384或SHA-512)返回哈希值。第一个参数string是要散列的字符串,第二个参数hashLength是结果的位长度(224、256、384或512)。如果string或hashLength为NULL,则返回NULL。' + , 'SHA2(${1:}, ${2:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 194, 'Function', '内置函数', '聚合函数', 'COUNT([ ALL ] expression | DISTINCT expression1 [, expression2]*)' + , '默认情况下或使用ALL时,返回表达式不为空的输入行数。对每个值的唯一实例使用DISTINCT。', 'COUNT( DISTINCT ${1:})' + , '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 195, 'Function', '内置函数', '聚合函数', 'COUNT(*) +COUNT(1)' + , '返回输入行数。', 'COUNT(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 196, 'Function', '内置函数', '聚合函数', 'AVG([ ALL | DISTINCT ] expression)' + , '默认情况下,或使用关键字ALL,返回表达式在所有输入行中的平均值(算术平均值)。对每个值的唯一实例使用DISTINCT。' + , 'AVG(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, 
`update_time`) +VALUES ( 197, 'Function', '内置函数', '聚合函数', 'SUM([ ALL | DISTINCT ] expression)' + , '默认情况下,或使用关键字ALL,返回所有输入行表达式的和。对每个值的唯一实例使用DISTINCT。', 'SUM(${1:})', '1.12', 0 + , 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 198, 'Function', '内置函数', '聚合函数', 'MAX([ ALL | DISTINCT ] expression)' + , '默认情况下或使用关键字ALL,返回表达式在所有输入行中的最大值。对每个值的唯一实例使用DISTINCT。', 'MAX(${1:})' + , '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 199, 'Function', '内置函数', '聚合函数', 'MIN([ ALL | DISTINCT ] expression)' + , '默认情况下,或使用关键字ALL,返回表达式在所有输入行中的最小值。对每个值的唯一实例使用DISTINCT。', 'MIN(${1:})' + , '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 200, 'Function', '内置函数', '聚合函数', 'STDDEV_POP([ ALL | DISTINCT ] expression)' + , '默认情况下,或使用关键字ALL,返回表达式在所有输入行中的总体标准差。对每个值的唯一实例使用DISTINCT。' + , 'STDDEV_POP(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 201, 'Function', '内置函数', '聚合函数', 'STDDEV_SAMP([ ALL | DISTINCT ] expression)' + , '默认情况下或使用关键字ALL时,返回表达式在所有输入行中的样本标准差。对每个值的唯一实例使用DISTINCT。' + , 'STDDEV_SAMP(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 202, 'Function', '内置函数', '聚合函数', 'VAR_POP([ ALL | DISTINCT ] expression)' + , '默认情况下,或使用关键字ALL,返回表达式在所有输入行中的总体方差(总体标准差的平方)。对每个值的唯一实例使用DISTINCT。' + , 'VAR_POP(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 203, 'Function', '内置函数', '聚合函数', 'VAR_SAMP([ ALL | DISTINCT ] expression)' + , '默认情况下,或使用关键字ALL,返回表达式在所有输入行中的样本方差(样本标准差的平方)。对每个值的唯一实例使用DISTINCT。' + , 'VAR_SAMP(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 204, 'Function', '内置函数', '聚合函数', 'COLLECT([ ALL | DISTINCT ] expression)' + , '默认情况下,或使用关键字ALL,跨所有输入行返回表达式的多集。空值将被忽略。对每个值的唯一实例使用DISTINCT。' + , 'COLLECT(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 205, 'Function', '内置函数', '聚合函数', 'VARIANCE([ ALL | DISTINCT ] expression)' + , 'VAR_SAMP的同义词。仅在blink planner中支持。', 'VARIANCE(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO 
`dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 206, 'Function', '内置函数', '聚合函数', 'RANK()' + , '返回值在一组值中的秩。结果是1加上分区顺序中位于当前行之前或等于当前行的行数。这些值将在序列中产生空白。仅在blink planner中支持。' + , 'RANK()', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 207, 'Function', '内置函数', '聚合函数', 'DENSE_RANK()' + , '返回值在一组值中的秩。结果是1加上前面分配的秩值。与函数rank不同,dense_rank不会在排序序列中产生空隙。仅在blink planner中支持。' + , 'DENSE_RANK()', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 208, 'Function', '内置函数', '聚合函数', 'ROW_NUMBER()' + , '根据窗口分区中的行顺序,为每一行分配一个惟一的连续数字,从1开始。仅在blink planner中支持。', 'ROW_NUMBER()' + , '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 209, 'Function', '内置函数', '聚合函数', 'LEAD(expression [, offset] [, default] )' + , '返回表达式在窗口中当前行之前的偏移行上的值。offset的默认值是1,default的默认值是NULL。仅在blink planner中支持。' + , 'LEAD(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 210, 'Function', '内置函数', '聚合函数', 'LAG(expression [, offset] [, default])' + , '返回表达式的值,该值位于窗口中当前行之后的偏移行。offset的默认值是1,default的默认值是NULL。仅在blink planner中支持。' + , 'LAG(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 211, 'Function', '内置函数', '聚合函数', 'FIRST_VALUE(expression)' + , '返回一组有序值中的第一个值。仅在blink planner中支持。', 'FIRST_VALUE(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 212, 'Function', '内置函数', '聚合函数', 'LAST_VALUE(expression)' + , '返回一组有序值中的最后一个值。仅在blink planner中支持。', 'LAST_VALUE(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 213, 'Function', '内置函数', '聚合函数', 'LISTAGG(expression [, separator])' + , '连接字符串表达式的值,并在它们之间放置分隔符值。分隔符没有添加在字符串的末尾。分隔符的默认值是'',''。仅在blink planner中支持。' + , 'LISTAGG(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 214, 'Function', '内置函数', '列函数', 'withColumns(…)' + , '选择的列', 'withColumns(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT 
INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 215, 'Function', '内置函数', '列函数', 'withoutColumns(…)' + , '不选择的列', 'withoutColumns(${1:})', '1.12', 0, 1 + , '2021-02-22 15:46:48', '2021-02-22 15:47:21'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 216, 'Function', '内置函数', '比较函数', 'value1 = value2' + , '如果value1等于value2 返回true; 如果value1或value2为NULL,则返回UNKNOWN 。', '${1:} =${2:}', '1.12', 9, 1 + , '2021-02-22 10:06:49', '2021-02-24 09:40:30'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 217, 'Function', 'UDF', '表值聚合函数', 'TO_MAP(string1,object2[, string3])' + , '将非规则一维表转化为规则二维表,string1是key。string2是value。string3为非必填项,表示key的值域(维度),用英文逗号分割。' + , 'TO_MAP(${1:})', '1.12', 8, 1 + , '2021-05-20 19:59:22', '2021-05-20 20:00:54'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 218, 'Reference', '建表语句', 'Streaming', 'EXECUTE CDCSOURCE print' + , 'Whole library synchronization print', 'EXECUTE CDCSOURCE demo_print WITH ( + ''connector'' = ''mysql-cdc'', + ''hostname'' = ''127.0.0.1'', + ''port'' = ''3306'', + ''username'' = ''root'', + ''password'' = ''123456'', + ''checkpoint'' = ''10000'', + ''scan.startup.mode'' = ''initial'', + ''parallelism'' = ''1'', + ''table-name'' = ''test\\.student,test\\.score'', + ''sink.connector'' = ''print'' +);', 'All Versions', 0, 1 + , '2023-10-31 16:01:45', '2023-10-31 16:02:56'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 219, 'Reference', '建表语句', 'Streaming', 'EXECUTE CDCSOURCE doris' + , 'Whole library synchronization doris', 'EXECUTE CDCSOURCE demo_print WITH ( + ''connector'' = ''mysql-cdc'', + ''hostname'' = ''127.0.0.1'', + ''port'' = ''3306'', + ''username'' = ''root'', + ''password'' = ''123456'', + ''checkpoint'' = ''10000'', + ''scan.startup.mode'' = ''initial'', + ''parallelism'' = ''1'', + ''table-name'' = ''test\\.student,test\\.score'', + ''sink.connector'' = ''print'' +);', 'All Versions', 0, 1 + , '2023-10-31 16:02:21', '2023-10-31 16:03:09'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 220, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE demo_doris_schema_evolution' + , 'The entire library is synchronized to doris tape mode evolution', 'EXECUTE CDCSOURCE demo_doris_schema_evolution WITH ( + ''connector'' = ''mysql-cdc'', + ''hostname'' = ''127.0.0.1'', + ''port'' = ''3306'', + ''username'' = ''root'', + ''password'' = ''123456'', + ''checkpoint'' = ''10000'', + ''scan.startup.mode'' = ''initial'', + ''parallelism'' = ''1'', + ''table-name'' = ''test\\.student,test\\.score'', + ''sink.connector'' = ''datastream-doris-schema-evolution'', + ''sink.fenodes'' = ''127.0.0.1:8030'', + ''sink.username'' = ''root'', + ''sink.password'' = ''123456'', + ''sink.doris.batch.size'' = ''1000'', + 
''sink.sink.max-retries'' = ''1'', + ''sink.sink.batch.interval'' = ''60000'', + ''sink.sink.db'' = ''test'', + ''sink.table.identifier'' = ''${schemaName}.${tableName}'' +);', 'All Versions', 0, 1 + , '2023-10-31 16:04:53', '2023-10-31 16:04:53'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 221, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE StarRocks ' + , 'The entire library is synchronized to StarRocks +', 'EXECUTE CDCSOURCE demo_hudi WITH ( + ''connector'' = ''mysql-cdc'', + ''hostname'' = ''127.0.0.1'', + ''port'' = ''3306'', + ''username'' = ''root'', + ''password'' = ''123456'', + ''checkpoint'' = ''10000'', + ''scan.startup.mode'' = ''initial'', + ''parallelism'' = ''1'', + ''database-name''=''bigdata'', + ''table-name''=''bigdata\\.products,bigdata\\.orders'', + ''sink.connector''=''hudi'', + ''sink.path''=''hdfs://nameservice1/data/hudi/${tableName}'', + ''sink.hoodie.datasource.write.recordkey.field''=''${pkList}'', + ''sink.hoodie.parquet.max.file.size''=''268435456'', + ''sink.write.tasks''=''1'', + ''sink.write.bucket_assign.tasks''=''2'', + ''sink.write.precombine''=''true'', + ''sink.compaction.async.enabled''=''true'', + ''sink.write.task.max.size''=''1024'', + ''sink.write.rate.limit''=''3000'', + ''sink.write.operation''=''upsert'', + ''sink.table.type''=''COPY_ON_WRITE'', + ''sink.compaction.tasks''=''1'', + ''sink.compaction.delta_seconds''=''20'', + ''sink.compaction.async.enabled''=''true'', + ''sink.read.streaming.skip_compaction''=''true'', + ''sink.compaction.delta_commits''=''20'', + ''sink.compaction.trigger.strategy''=''num_or_time'', + ''sink.compaction.max_memory''=''500'', + ''sink.changelog.enabled''=''true'', + ''sink.read.streaming.enabled''=''true'', + ''sink.read.streaming.check.interval''=''3'', + ''sink.hive_sync.skip_ro_suffix'' = ''true'', + ''sink.hive_sync.enable''=''true'', + ''sink.hive_sync.mode''=''hms'', + ''sink.hive_sync.metastore.uris''=''thrift://bigdata1:9083'', + ''sink.hive_sync.db''=''qhc_hudi_ods'', + ''sink.hive_sync.table''=''${tableName}'', + ''sink.table.prefix.schema''=''true'' +);', 'All Versions', 0, 1 + , '2023-10-31 16:05:50', '2023-10-31 16:08:53'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 222, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE cdc_mysql' + , 'The entire library is synchronized to mysql', 'EXECUTE CDCSOURCE demo_startrocks WITH ( + ''connector'' = ''mysql-cdc'', + ''hostname'' = ''127.0.0.1'', + ''port'' = ''3306'', + ''username'' = ''root'', + ''password'' = ''123456'', + ''checkpoint'' = ''3000'', + ''scan.startup.mode'' = ''initial'', + ''parallelism'' = ''1'', + ''table-name'' = ''bigdata\\.products,bigdata\\.orders'', + ''sink.connector'' = ''starrocks'', + ''sink.jdbc-url'' = ''jdbc:mysql://127.0.0.1:19035'', + ''sink.load-url'' = ''127.0.0.1:18035'', + ''sink.username'' = ''root'', + ''sink.password'' = ''123456'', + ''sink.sink.db'' = ''ods'', + ''sink.table.prefix'' = ''ods_'', + ''sink.table.lower'' = ''true'', + ''sink.database-name'' = ''ods'', + ''sink.table-name'' = ''${tableName}'', + ''sink.sink.properties.format'' = ''json'', + ''sink.sink.properties.strip_outer_array'' = ''true'', + ''sink.sink.max-retries'' = ''10'', + ''sink.sink.buffer-flush.interval-ms'' = ''15000'', + 
''sink.sink.parallelism'' = ''1'' +);', 'All Versions', 0, 1 + , '2023-10-31 16:07:08', '2023-10-31 16:08:46'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 223, 'Reference', '建表语句', 'Streaming', 'EXECUTE CDCSOURCE demo_doris' + , 'The entire library is synchronized to mysql', 'EXECUTE CDCSOURCE cdc_mysql WITH ( + ''connector'' = ''mysql-cdc'', + ''hostname'' = ''127.0.0.1'', + ''port'' = ''3306'', + ''username'' = ''root'', + ''password'' = ''123456'', + ''checkpoint'' = ''3000'', + ''scan.startup.mode'' = ''initial'', + ''parallelism'' = ''1'', + ''table-name'' = ''bigdata\\.products,bigdata\\.orders'', + ''sink.connector'' = ''jdbc'', + ''sink.url'' = ''jdbc:mysql://127.0.0.1:3306/test?characterEncoding=utf-8&useSSL=false'', + ''sink.username'' = ''root'', + ''sink.password'' = ''123456'', + ''sink.sink.db'' = ''test'', + ''sink.table.prefix'' = ''test_'', + ''sink.table.lower'' = ''true'', + ''sink.table-name'' = ''${tableName}'', + ''sink.driver'' = ''com.mysql.jdbc.Driver'', + ''sink.sink.buffer-flush.interval'' = ''2s'', + ''sink.sink.buffer-flush.max-rows'' = ''100'', + ''sink.sink.max-retries'' = ''5'', + ''sink.auto.create'' = ''true'' +);', 'All Versions', 0, 1 + , '2023-10-31 16:07:47', '2023-10-31 16:08:39'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 224, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE cdc_oracle' + , 'The entire library is synchronized to cdc_oracle', 'EXECUTE CDCSOURCE cdc_oracle WITH ( + ''connector'' = ''oracle-cdc'', + ''hostname'' = ''127.0.0.1'', + ''port'' = ''1521'', + ''username''=''root'', + ''password''=''123456'', + ''database-name''=''ORCL'', + ''checkpoint'' = ''3000'', + ''scan.startup.mode'' = ''initial'', + ''parallelism'' = ''1'', + ''table-name'' = ''TEST\\..*'', + ''connector'' = ''jdbc'', + ''url'' = ''jdbc:oracle:thin:@127.0.0.1:1521:orcl'', + ''username'' = ''root'', + ''password'' = ''123456'', + ''table-name'' = ''TEST2.${tableName}'' +);', 'All Versions', 0, 1 + , '2023-10-31 16:08:30', '2023-10-31 16:08:30'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 225, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE cdc_kafka_one' + , 'The entire library is synchronized to a topic in kafka', 'EXECUTE CDCSOURCE cdc_kafka_one WITH ( + ''connector'' = ''mysql-cdc'', + ''hostname'' = ''127.0.0.1'', + ''port'' = ''3306'', + ''username'' = ''root'', + ''password'' = ''123456'', + ''checkpoint'' = ''3000'', + ''scan.startup.mode'' = ''initial'', + ''parallelism'' = ''1'', + ''table-name'' = ''bigdata\\.products,bigdata\\.orders'', + ''sink.connector''=''datastream-kafka'', + ''sink.topic''=''cdctest'', + ''sink.brokers''=''bigdata2:9092,bigdata3:9092,bigdata4:9092'' +);', 'All Versions', 0, 1 + , '2023-10-31 16:10:13', '2023-10-31 16:10:13'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 226, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE cdc_kafka_mul' + , 'The entire library is synchronized to a single topic in kafka', 'EXECUTE 
CDCSOURCE cdc_kafka_mul WITH ( + ''connector'' = ''mysql-cdc'', + ''hostname'' = ''127.0.0.1'', + ''port'' = ''3306'', + ''username'' = ''root'', + ''password'' = ''123456'', + ''checkpoint'' = ''3000'', + ''scan.startup.mode'' = ''initial'', + ''parallelism'' = ''1'', + ''table-name'' = ''bigdata\\.products,bigdata\\.orders'', + ''sink.connector''=''datastream-kafka'', + ''sink.brokers''=''bigdata2:9092,bigdata3:9092,bigdata4:9092'' +)', 'All Versions', 0, 1 + , '2023-10-31 16:10:59', '2023-10-31 16:10:59'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 227, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE cdc_upsert_kafka' + , 'The entire library is synchronized to kafka primary key mode', 'EXECUTE CDCSOURCE cdc_upsert_kafka WITH ( + ''connector'' = ''mysql-cdc'', + ''hostname'' = ''127.0.0.1'', + ''port'' = ''3306'', + ''username'' = ''root'', + ''password'' = ''123456'', + ''checkpoint'' = ''3000'', + ''scan.startup.mode'' = ''initial'', + ''parallelism'' = ''1'', + ''table-name'' = ''bigdata\\.products,bigdata\\.orders'', + ''sink.connector'' = ''upsert-kafka'', + ''sink.topic'' = ''${tableName}'', + ''sink.properties.bootstrap.servers'' = ''bigdata2:9092,bigdata3:9092,bigdata4:9092'', + ''sink.key.format'' = ''json'', + ''sink.value.format'' = ''json'' +);', 'All Versions', 0, 1 + , '2023-10-31 16:12:14', '2023-10-31 16:12:14'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 228, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE cdc_postgresql ' + , 'The entire library is synchronized to postgresql', 'EXECUTE CDCSOURCE cdc_postgresql WITH ( + ''connector'' = ''mysql-cdc'', + ''hostname'' = ''127.0.0.1'', + ''port'' = ''3306'', + ''username'' = ''root'', + ''password'' = ''123456'', + ''checkpoint'' = ''3000'', + ''scan.startup.mode'' = ''initial'', + ''parallelism'' = ''1'', + ''table-name'' = ''bigdata\\.products,bigdata\\.orders'', + ''sink.connector'' = ''jdbc'', + ''sink.url'' = ''jdbc:postgresql://127.0.0.1:5432/test'', + ''sink.username'' = ''test'', + ''sink.password'' = ''123456'', + ''sink.sink.db'' = ''test'', + ''sink.table.prefix'' = ''test_'', + ''sink.table.lower'' = ''true'', + ''sink.table-name'' = ''${tableName}'', + ''sink.driver'' = ''org.postgresql.Driver'', + ''sink.sink.buffer-flush.interval'' = ''2s'', + ''sink.sink.buffer-flush.max-rows'' = ''100'', + ''sink.sink.max-retries'' = ''5'' +)', 'All Versions', 0, 1 + , '2023-10-31 16:12:54', '2023-10-31 16:12:54'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 229, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE cdc_clickhouse' + , 'Sync the entire library to clickhouse', 'EXECUTE CDCSOURCE cdc_clickhouse WITH ( + ''connector'' = ''mysql-cdc'', + ''hostname'' = ''127.0.0.1'', + ''port'' = ''3306'', + ''username'' = ''root'', + ''password'' = ''123456'', + ''checkpoint'' = ''3000'', + ''scan.startup.mode'' = ''initial'', + ''parallelism'' = ''1'', + ''table-name'' = ''bigdata\\.products,bigdata\\.orders'', + ''sink.connector'' = ''clickhouse'', + ''sink.url'' = ''clickhouse://127.0.0.1:8123'', + ''sink.username'' = ''default'', + ''sink.password'' = 
''123456'', + ''sink.sink.db'' = ''test'', + ''sink.table.prefix'' = ''test_'', + ''sink.table.lower'' = ''true'', + ''sink.database-name'' = ''test'', + ''sink.table-name'' = ''${tableName}'', + ''sink.sink.batch-size'' = ''500'', + ''sink.sink.flush-interval'' = ''1000'', + ''sink.sink.max-retries'' = ''3'' +);', 'All Versions', 0, 1 + , '2023-10-31 16:13:33', '2023-10-31 16:13:33'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 230, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE mysql2hive' + , 'The entire library is synchronized to the sql-catalog of hive', 'EXECUTE CDCSOURCE mysql2hive WITH ( + ''connector'' = ''mysql-cdc'', + ''hostname'' = ''127.0.0.1'', + ''port'' = ''3306'', + ''username'' = ''root'', + ''password'' = ''123456'', + ''checkpoint'' = ''10000'', + ''scan.startup.mode'' = ''initial'', + ''parallelism'' = ''1'', + ''table-name'' = ''test\\..*'', + ''sink.connector'' = ''sql-catalog'', + ''sink.catalog.name'' = ''hive'', + ''sink.catalog.type'' = ''hive'', + ''sink.default-database'' = ''hdb'', + ''sink.hive-conf-dir'' = ''/usr/local/dlink/hive-conf'' +);', 'All Versions', 0, 1 + , '2023-10-31 16:14:31', '2023-10-31 16:14:31'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 231, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE mysql2paimon' + , 'The entire library is synchronized to paimon', 'EXECUTE CDCSOURCE mysql2paimon WITH ( + ''connector'' = ''mysql-cdc'', + ''hostname'' = ''127.0.0.1'', + ''port'' = ''3306'', + ''username'' = ''root'', + ''password'' = ''123456'', + ''checkpoint'' = ''10000'', + ''scan.startup.mode'' = ''initial'', + ''parallelism'' = ''1'', + ''table-name'' = ''test\\..*'', + ''sink.connector'' = ''sql-catalog'', + ''sink.catalog.name'' = ''fts'', + ''sink.catalog.type'' = ''table-store'', + ''sink.catalog.warehouse''=''file:/tmp/table_store'' +);', 'All Versions', 0, 1 + , '2023-10-31 16:15:22', '2023-10-31 16:28:52'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 232, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE mysql2dinky_catalog' + , 'The entire library is synchronized to dinky''s built-in catalog', 'EXECUTE CDCSOURCE mysql2dinky_catalog WITH ( + ''connector'' = ''mysql-cdc'', + ''hostname'' = ''127.0.0.1'', + ''port'' = ''3306'', + ''username'' = ''root'', + ''password'' = ''123456'', + ''checkpoint'' = ''10000'', + ''scan.startup.mode'' = ''initial'', + ''parallelism'' = ''1'', + ''table-name'' = ''test\\..*'', + ''sink.connector'' = ''sql-catalog'', + ''sink.catalog.name'' = ''dlinkmysql'', + ''sink.catalog.type'' = ''dlink_mysql'', + ''sink.catalog.username'' = ''dlink'', + ''sink.catalog.password'' = ''dlink'', + ''sink.catalog.url'' = ''jdbc:mysql://127.0.0.1:3306/dlink?useUnicode=true&characterEncoding=utf8&serverTimezone=UTC'', + ''sink.sink.db'' = ''default_database'' +);', 'All Versions', 0, 1 + , '2023-10-31 16:16:22', '2023-10-31 16:16:22'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 233, 'Reference', '建表语句', 
'Batch/Streaming', 'EXECUTE CDCSOURCE mysql2multiple_sink' + , 'Synchronization of the entire library to multiple data sources (sink)', 'EXECUTE CDCSOURCE mysql2multiple_sink WITH ( + ''connector'' = ''mysql-cdc'', + ''hostname'' = ''127.0.0.1'', + ''port'' = ''3306'', + ''username'' = ''root'', + ''password'' = ''123456'', + ''checkpoint'' = ''3000'', + ''scan.startup.mode'' = ''initial'', + ''parallelism'' = ''1'', + ''table-name'' = ''test\\.student,test\\.score'', + ''sink[0].connector'' = ''doris'', + ''sink[0].fenodes'' = ''127.0.0.1:8030'', + ''sink[0].username'' = ''root'', + ''sink[0].password'' = ''dw123456'', + ''sink[0].sink.batch.size'' = ''1'', + ''sink[0].sink.max-retries'' = ''1'', + ''sink[0].sink.batch.interval'' = ''60000'', + ''sink[0].sink.db'' = ''test'', + ''sink[0].table.prefix'' = ''ODS_'', + ''sink[0].table.upper'' = ''true'', + ''sink[0].table.identifier'' = ''${schemaName}.${tableName}'', + ''sink[0].sink.label-prefix'' = ''${schemaName}_${tableName}_1'', + ''sink[0].sink.enable-delete'' = ''true'', + ''sink[1].connector''=''datastream-kafka'', + ''sink[1].topic''=''cdc'', + ''sink[1].brokers''=''127.0.0.1:9092'' +)', 'All Versions', 0, 1 + , '2023-10-31 16:17:27', '2023-10-31 16:17:27'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 234, 'Reference', '建表语句', 'Batch/Streaming', 'ADD JAR' + , 'ADD JAR', 'ADD JAR ${1:}; // -- str path ', 'All Versions', 0, 1 + , '2023-10-31 16:19:52', '2023-10-31 16:23:16'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 235, 'Function', 'Other', 'Batch/Streaming', 'SHOW FRAGMENTS' + , 'SHOW FRAGMENTS', 'SHOW FRAGMENTS;', 'All Versions', 0, 1 + , '2023-10-31 16:20:30', '2023-10-31 16:20:30'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 236, 'Function', 'Other', 'Batch/Streaming', 'SHOW FRAGMENT var1' + , 'SHOW FRAGMENT var1', 'SHOW FRAGMENT ${1:};', 'All Versions', 0, 1 + , '2023-10-31 16:21:23', '2023-10-31 16:21:23'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 237, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE demo_hudi' + , 'The entire library is synchronized to hudi', 'EXECUTE CDCSOURCE demo_hudi WITH ( + ''connector'' = ''mysql-cdc'', + ''hostname'' = ''127.0.0.1'', + ''port'' = ''3306'', + ''username'' = ''root'', + ''password'' = ''123456'', + ''checkpoint'' = ''10000'', + ''scan.startup.mode'' = ''initial'', + ''parallelism'' = ''1'', + ''database-name''=''bigdata'', + ''table-name''=''bigdata\\.products,bigdata\\.orders'', + ''sink.connector''=''hudi'', + ''sink.path''=''hdfs://nameservice1/data/hudi/${tableName}'', + ''sink.hoodie.datasource.write.recordkey.field''=''${pkList}'', + ''sink.hoodie.parquet.max.file.size''=''268435456'', + ''sink.write.tasks''=''1'', + ''sink.write.bucket_assign.tasks''=''2'', + ''sink.write.precombine''=''true'', + ''sink.compaction.async.enabled''=''true'', + ''sink.write.task.max.size''=''1024'', + ''sink.write.rate.limit''=''3000'', + ''sink.write.operation''=''upsert'', + 
''sink.table.type''=''COPY_ON_WRITE'', + ''sink.compaction.tasks''=''1'', + ''sink.compaction.delta_seconds''=''20'', + ''sink.compaction.async.enabled''=''true'', + ''sink.read.streaming.skip_compaction''=''true'', + ''sink.compaction.delta_commits''=''20'', + ''sink.compaction.trigger.strategy''=''num_or_time'', + ''sink.compaction.max_memory''=''500'', + ''sink.changelog.enabled''=''true'', + ''sink.read.streaming.enabled''=''true'', + ''sink.read.streaming.check.interval''=''3'', + ''sink.hive_sync.skip_ro_suffix'' = ''true'', + ''sink.hive_sync.enable''=''true'', + ''sink.hive_sync.mode''=''hms'', + ''sink.hive_sync.metastore.uris''=''thrift://bigdata1:9083'', + ''sink.hive_sync.db''=''qhc_hudi_ods'', + ''sink.hive_sync.table''=''${tableName}'', + ''sink.table.prefix.schema''=''true'' +);', 'All Versions', 0, 1 + , '2023-10-31 16:24:47', '2023-10-31 16:24:47'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 238, 'Reference', 'Other', 'Batch/Streaming', 'EXECUTE JAR ' + , 'EXECUTE JAR use sql', 'EXECUTE JAR WITH ( +''uri''=''file:///opt/flink/lib/paimon-flink-1.16-0.5-20230818.001833-127.jar'', +''main-class''=''org.apache.paimon.flink.action.FlinkActions'', +''args''=''mysql-sync-table // --warehouse hdfs:///save // --database cdc-test // --table cdc_test1 // --primary-keys id // --mysql-conf hostname=121.5.136.161 // --mysql-conf port=3371 // --mysql-conf username=root // --mysql-conf password=dinky // --mysql-conf database-name=cdc-test // --mysql-conf table-name=table_1 // --mysql-conf server-time-zone=Asia/Shanghai // --table-conf bucket=4 // --table-conf changelog-producer=input // --table-conf sink.parallelism=1'', +''parallelism''='''', +''savepoint-path''='''' +);', 'All Versions', 0, 1 + , '2023-10-31 16:27:53', '2023-10-31 16:27:53'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 239, 'Reference', '内置函数', 'Streaming', 'PRINT tablename' + , 'PRINT table data', 'PRINT ${1:}', 'All Versions', 0, 1 + , '2023-10-31 16:30:22', '2023-10-31 16:30:22'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 240, 'Reference', '建表语句', 'Batch/Streaming', 'CREATE TABLE Like' + , 'CREATE TABLE Like source table', 'DROP TABLE IF EXISTS sink_table; +CREATE TABLE IF not EXISTS sink_table +WITH ( + ''topic'' = ''motor_vehicle_error'' +) +LIKE source_table;', 'All Versions', 0, 1 + , '2023-10-31 16:33:38', '2023-10-31 16:33:38'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 241, 'Reference', '建表语句', 'Batch/Streaming', 'CREATE TABLE like source_table EXCLUDING' + , 'CREATE TABLE like source_table EXCLUDING', 'DROP TABLE IF EXISTS sink_table; +CREATE TABLE IF not EXISTS sink_table( + // -- Add watermark definition + WATERMARK FOR order_time AS order_time - INTERVAL ''5'' SECOND +) +WITH ( + ''topic'' = ''motor_vehicle_error'' +) +LIKE source_table ( + // -- Exclude everything besides the computed columns which we need to generate the watermark for. 
+ // -- We do not want to have the partitions or filesystem options as those do not apply to kafka. + EXCLUDING ALL + INCLUDING GENERATED +);', 'All Versions', 0, 1 + , '2023-10-31 16:36:13', '2023-10-31 16:36:13'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 242, 'Reference', '建表语句', 'Batch/Streaming', 'CREATE TABLE ctas_kafka' + , 'CREATE TABLE ctas_kafka', 'CREATE TABLE my_ctas_table +WITH ( + ''connector'' = ''kafka'' +) +AS SELECT id, name, age FROM source_table WHERE mod(id, 10) = 0;', 'All Versions', 0, 1 + , '2023-10-31 16:37:33', '2023-10-31 16:47:17'); +INSERT INTO `dinky_flink_document` ( `id`, `category`, `type`, `subtype`, `name` + , `description`, `fill_value`, `version`, `like_num`, `enabled` + , `create_time`, `update_time`) +VALUES ( 243, 'Reference', '建表语句', 'Batch/Streaming', 'CREATE TABLE rtas_kafka' + , 'CREATE TABLE rtas_kafka', 'CREATE OR REPLACE TABLE my_ctas_table +WITH ( + ''connector'' = ''kafka'' +) +AS SELECT id, name, age FROM source_table WHERE mod(id, 10) = 0;', 'All Versions', 0, 1 + , '2023-10-31 16:41:46', '2023-10-31 16:43:29'); + diff --git a/dinky-admin/src/main/resources/db/db-h2.sql b/dinky-admin/src/main/resources/db/db-h2.sql deleted file mode 100644 index f88d16598a..0000000000 --- a/dinky-admin/src/main/resources/db/db-h2.sql +++ /dev/null @@ -1,2908 +0,0 @@ -SET NAMES utf8mb4; -SET FOREIGN_KEY_CHECKS = 0; -DROP TABLE IF EXISTS `dinky_alert_group`; -CREATE TABLE `dinky_alert_group` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'id', - `name` varchar(50) NOT null COMMENT 'alert group name', - `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', - `alert_instance_ids` text null COMMENT 'Alert instance IDS', - `note` varchar(255) null DEFAULT null COMMENT 'note', - `enabled` tinyint(4) null DEFAULT 1 COMMENT 'is enable', - `create_time` datetime(0) null DEFAULT null COMMENT 'create time', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_alert_history`; -CREATE TABLE `dinky_alert_history` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'id', - `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', - `alert_group_id` int(11) null DEFAULT null COMMENT 'Alert group ID', - `job_instance_id` int(11) null DEFAULT null COMMENT 'job instance ID', - `title` varchar(255) null DEFAULT null COMMENT 'alert title', - `content` text null COMMENT 'content description', - `status` int(11) null DEFAULT null COMMENT 'alert status', - `log` text null COMMENT 'log', - `create_time` datetime(0) null DEFAULT null COMMENT 'create time', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_alert_instance`; -CREATE TABLE `dinky_alert_instance` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'id', - `name` varchar(50) NOT null COMMENT 'alert instance name', - `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', - `type` varchar(50) null DEFAULT null COMMENT 'alert instance type such as: DingTalk,Wechat(Webhook,app) Feishu ,email', - `params` text null COMMENT 'configuration', - `enabled` tinyint(4) null DEFAULT 1 COMMENT 'is enable', - `create_time` datetime(0) null DEFAULT null COMMENT 'create time', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS 
`dinky_catalogue`; -CREATE TABLE `dinky_catalogue` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', - `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', - `task_id` int(11) null DEFAULT null COMMENT 'Job ID', - `name` varchar(100) NOT null COMMENT 'Job Name', - `type` varchar(50) null DEFAULT null COMMENT 'Job Type', - `parent_id` int(11) NOT null DEFAULT 0 COMMENT 'parent ID', - `enabled` tinyint(1) NOT null DEFAULT 1 COMMENT 'is enable', - `is_leaf` tinyint(1) NOT null COMMENT 'is leaf node', - `create_time` datetime(0) null DEFAULT null COMMENT 'create time', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_cluster`; -CREATE TABLE `dinky_cluster` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', - `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', - `name` varchar(255) NOT null COMMENT 'cluster instance name', - `alias` varchar(255) null DEFAULT null COMMENT 'cluster instance alias', - `type` varchar(50) null DEFAULT null COMMENT 'cluster types', - `hosts` text null COMMENT 'cluster hosts', - `job_manager_host` varchar(255) null DEFAULT null COMMENT 'Job Manager Host', - `version` varchar(20) null DEFAULT null COMMENT 'version', - `status` int(11) null DEFAULT null COMMENT 'cluster status', - `note` varchar(255) null DEFAULT null COMMENT 'note', - `auto_registers` tinyint(1) null DEFAULT 0 COMMENT 'is auto registration', - `cluster_configuration_id` int(11) null DEFAULT null COMMENT 'cluster configuration id', - `task_id` int(11) null DEFAULT null COMMENT 'task ID', - `enabled` tinyint(1) NOT null DEFAULT 1 COMMENT 'is enable', - `create_time` datetime(0) null DEFAULT null COMMENT 'create time', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_cluster_configuration`; -CREATE TABLE `dinky_cluster_configuration` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', - `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', - `name` varchar(255) NOT null COMMENT 'cluster configuration name', - `type` varchar(50) null DEFAULT null COMMENT 'cluster type', - `config_json` text null COMMENT 'json of configuration', - `is_available` tinyint(1) NOT null DEFAULT 0 COMMENT 'is available', - `note` varchar(255) null DEFAULT null COMMENT 'note', - `enabled` tinyint(1) NOT null DEFAULT 1 COMMENT 'is enable', - `create_time` datetime(0) null DEFAULT null COMMENT 'create time', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_database`; -CREATE TABLE `dinky_database` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', - `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', - `name` varchar(30) NOT null COMMENT 'database name', - `group_name` varchar(255) null DEFAULT 'Default' COMMENT 'database belong group name', - `type` varchar(50) NOT null COMMENT 'database type', - `ip` varchar(255) null DEFAULT null COMMENT 'database ip', - `port` int(11) null DEFAULT null COMMENT 'database port', - `url` varchar(255) null DEFAULT null COMMENT 'database url', - `username` varchar(50) null DEFAULT null COMMENT 'username', - `password` varchar(512) null DEFAULT null COMMENT 'password', - `note` varchar(255) null DEFAULT null COMMENT 'note', - `flink_config` text null COMMENT 'Flink configuration', - `flink_template` text null COMMENT 'Flink template', - `db_version` varchar(255) null DEFAULT null COMMENT 
'version,such as: 11g of oracle ,2.2.3 of hbase', - `status` tinyint(1) null DEFAULT null COMMENT 'heartbeat status', - `health_time` datetime(0) null DEFAULT null COMMENT 'last heartbeat time of trigger', - `heartbeat_time` datetime(0) null DEFAULT null COMMENT 'last heartbeat time', - `enabled` tinyint(1) NOT null DEFAULT 1 COMMENT 'is enable', - `create_time` datetime(0) null DEFAULT null COMMENT 'create time', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_flink_document`; -CREATE TABLE `dinky_flink_document` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'id', - `category` varchar(255) null DEFAULT null COMMENT 'document category', - `type` varchar(255) null DEFAULT null COMMENT 'document type', - `subtype` varchar(255) null DEFAULT null COMMENT 'document subtype', - `name` varchar(255) null DEFAULT null COMMENT 'document name', - `description` longtext null, - `fill_value` longtext null COMMENT 'fill value', - `version` varchar(255) null DEFAULT null COMMENT 'document version such as:(flink1.12,flink1.13,flink1.14,flink1.15)', - `like_num` int(11) null DEFAULT 0 COMMENT 'like number', - `enabled` tinyint(1) NOT null DEFAULT 0 COMMENT 'is enable', - `create_time` datetime(0) null DEFAULT null COMMENT 'create time', - `update_time` datetime(0) null DEFAULT null COMMENT 'update_time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; - - -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (1, 'Variable', '优化参数', 'Batch/Streaming', 'set table.exec.async-lookup.buffer-capacity' - , '异步查找连接可以触发的最大异步操作的操作数。 -The max number of async i/o operation that the async lookup join can trigger.', 'Set ''table.exec.async-lookup.buffer-capacity''=''100'';', '1.14', 0, 1 - , '2022-01-20 15:00:00', '2022-01-20 15:00:00');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (2, 'Variable', '优化参数', 'Batch/Streaming', 'set table.exec.async-lookup.timeout' - , '异步操作完成的超时时间。 -The async timeout for the asynchronous operation to complete.', 'Set ''table.exec.async-lookup.timeout''=''3 min'';', '1.14', 0, 1 - , '2022-01-20 15:00:00', '2022-01-20 15:00:00');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (3, 'Variable', '优化参数', 'Batch', 'set table.exec.disabled-operators' - , '禁用指定operators,用逗号分隔 -Mainly for testing. A comma-separated list of operator names, each name represents a kind of disabled operator. Operators that can be disabled include "NestedLoopJoin", "ShuffleHashJoin", "BroadcastHashJoin", "SortMergeJoin", "HashAgg", "SortAgg". 
By default no operator is disabled.', 'Set ''table.exec.disabled-operators''=''SortMergeJoin'';', '1.14', 0, 1 - , '2022-01-20 15:00:00', '2022-01-20 15:00:00');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (4, 'Variable', '优化参数', 'Streaming', 'set table.exec.mini-batch.allow-latency' - , '最大等待时间可用于MiniBatch缓冲输入记录。 MiniBatch是用于缓冲输入记录以减少状态访问的优化。MiniBatch以允许的等待时间间隔以及达到最大缓冲记录数触发。注意:如果将table.exec.mini-batch.enabled设置为true,则其值必须大于零.', 'Set ''table.exec.mini-batch.allow-latency''=''-1 ms'';', '1.14', 0, 1 - , '2022-01-20 15:00:00', '2022-01-20 15:00:00');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (5, 'Variable', '优化参数', 'Streaming', 'set table.exec.mini-batch.enabled' - , '指定是否启用MiniBatch优化。 MiniBatch是用于缓冲输入记录以减少状态访问的优化。默认情况下禁用此功能。 要启用此功能,用户应将此配置设置为true。注意:如果启用了mini batch 处理,则必须设置“ table.exec.mini-batch.allow-latency”和“ table.exec.mini-batch.size”.', 'Set ''table.exec.mini-batch.enabled''=''false'';', '1.14', 0, 1 - , '2022-01-20 15:00:00', '2022-01-20 15:00:00');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (6, 'Variable', '优化参数', 'Streaming', 'set table.exec.mini-batch.size' - , '可以为MiniBatch缓冲最大输入记录数。 MiniBatch是用于缓冲输入记录以减少状态访问的优化。MiniBatch以允许的等待时间间隔以及达到最大缓冲记录数触发。 注意:MiniBatch当前仅适用于非窗口聚合。如果将table.exec.mini-batch.enabled设置为true,则其值必须为正.', 'Set ''table.exec.mini-batch.size''=''-1'';', '1.14', 0, 1 - , '2022-01-20 15:00:00', '2022-01-20 15:00:00');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (7, 'Variable', '优化参数', 'Batch/Streaming', 'set table.exec.resource.default-parallelism' - , '设置所有Operator的默认并行度。 -Sets default parallelism for all operators (such as aggregate, join, filter) to run with parallel instances. This config has a higher priority than parallelism of StreamExecutionEnvironment (actually, this config overrides the parallelism of StreamExecutionEnvironment). A value of -1 indicates that no default parallelism is set, then it will fallback to use the parallelism of StreamExecutionEnvironment.', 'Set ''table.exec.resource.default-parallelism''=''1'';', '1.14', 0, 1 - , '2022-01-20 15:00:00', '2022-01-20 15:00:00');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (8, 'Variable', '优化参数', 'Batch/Streaming', 'set table.exec.sink.not-null-enforcer' - , '对表的NOT NULL列约束强制执行不能将空值插入到表中。Flink支持“error”(默认)和“drop”强制行为 -The NOT NULL column constraint on a table enforces that null values can''t be inserted into the table. Flink supports ''error'' (default) and ''drop'' enforcement behavior. By default, Flink will check values and throw runtime exception when null values writing into NOT NULL columns. Users can change the behavior to ''drop'' to silently drop such records without throwing exception. 
-Possible values: -"ERROR" -"DROP"', 'Set ''table.exec.sink.not-null-enforcer''=''ERROR'';', '1.14', 0, 1 - , '2022-01-20 15:00:00', '2022-01-20 15:00:00');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (9, 'Variable', '优化参数', 'Streaming', 'set table.exec.sink.upsert-materialize' - , '由于分布式系统中 Shuffle 导致 ChangeLog 数据混乱,Sink 接收到的数据可能不是全局 upsert 的顺序。因此,在 upsert sink 之前添加 upsert materialize 运算符。它接收上游的变更日志记录并为下游生成一个 upsert 视图。默认情况下,当唯一键出现分布式无序时,会添加具体化操作符。您也可以选择不实现(NONE)或强制实现(FORCE)。 -Possible values: -"NONE" -"FORCE" -"AUTO"', 'Set ''table.exec.sink.upsert-materialize''=''AUTO'';', '1.14', 0, 1 - , '2022-01-20 15:00:00', '2022-01-20 15:00:00');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (10, 'Module', '建表语句', 'Other', 'create.table.kafka' - , 'kafka快速建表格式', 'CREATE TABLE Kafka_Table ( - `event_time` TIMESTAMP(3) METADATA FROM ''timestamp'', - `partition` BIGINT METADATA VIRTUAL, - `offset` BIGINT METADATA VIRTUAL, - `user_id` BIGINT, - `item_id` BIGINT, - `behavior` STRING -) WITH ( - ''connector'' = ''kafka'', - ''topic'' = ''user_behavior'', - ''properties.bootstrap.servers'' = ''localhost:9092'', - ''properties.group.id'' = ''testGroup'', - ''scan.startup.mode'' = ''earliest-offset'', - ''format'' = ''csv'' -); ---可选: ''value.fields-include'' = ''ALL'', ---可选: ''json.ignore-parse-errors'' = ''true'', ---可选: ''key.fields-prefix'' = ''k_'',', '1.14', 0, 1 - , '2022-01-20 16:59:18', '2022-01-20 17:57:32');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (11, 'Module', '建表语句', 'Other', 'create.table.doris' - , 'Doris快速建表', 'CREATE TABLE doris_table ( - cid INT, - sid INT, - name STRING, - cls STRING, - score INT, - PRIMARY KEY (cid) NOT ENFORCED -) WITH ( -''connector'' = ''doris'', -''fenodes'' = ''127.0.0.1:8030'' , -''table.identifier'' = ''test.scoreinfo'', -''username'' = ''root'', -''password''='''' -);', '1.14', 0, 1 - , '2022-01-20 17:08:00', '2022-01-20 17:57:26');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (12, 'Module', '建表语句', 'Other', 'create.table.jdbc' - , 'JDBC建表语句', 'CREATE TABLE JDBC_table ( - id BIGINT, - name STRING, - age INT, - status BOOLEAN, - PRIMARY KEY (id) NOT ENFORCED -) WITH ( - ''connector'' = ''jdbc'', - ''url'' = ''jdbc:mysql://localhost:3306/mydatabase'', - ''table-name'' = ''users'', - ''username'' = ''root'', - ''password'' = ''123456'' -); ---可选: ''sink.parallelism''=''1'', ---可选: ''lookup.cache.ttl''=''1000s'',', '1.14', 0, 1 - , '2022-01-20 17:15:26', '2022-01-20 17:57:20');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (13, 'Module', 'CataLog', 'Other', 'create.catalog.hive' - , '创建HIVE的catalog', 'CREATE CATALOG hive WITH ( - ''type'' = ''hive'', - ''default-database'' = ''default'', - ''hive-conf-dir'' = ''/app/wwwroot/MBDC/hive/conf/'', --hive配置文件 - ''hadoop-conf-dir''=''/app/wwwroot/MBDC/hadoop/etc/hadoop/'' --hadoop配置文件,配了环境变量则不需要。 -);', 
'1.14', 0, 1 - , '2022-01-20 17:18:54', '2022-01-20 17:18:54');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (14, 'Operator', 'CataLog', 'Other', 'use.catalog.hive' - , '使用hive的catalog', 'USE CATALOG hive;', '1.14', 0, 1 - , '2022-01-20 17:22:53', '2022-01-20 17:22:53');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (15, 'Operator', 'CataLog', 'Other', 'use.catalog.default' - , '使用default的catalog', 'USE CATALOG default_catalog;', '1.14', 0, 1 - , '2022-01-20 17:23:48', '2022-01-20 17:24:23');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (16, 'Variable', '设置参数', 'Other', 'set dialect.hive' - , '使用hive方言', 'Set table.sql-dialect=hive;', '1.14', 0, 1 - , '2022-01-20 17:25:37', '2022-01-20 17:27:23');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (17, 'Variable', '设置参数', 'Other', 'set dialect.default' - , '使用default方言', 'Set table.sql-dialect=default;', '1.14', 0, 1 - , '2022-01-20 17:26:19', '2022-01-20 17:27:20');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (18, 'Module', '建表语句', 'Other', 'create.stream.table.hive' - , '创建流式HIVE表', 'CREATE CATALOG hive WITH ( --创建hive的catalog - ''type'' = ''hive'', - ''hive-conf-dir'' = ''/app/wwwroot/MBDC/hive/conf/'', - ''hadoop-conf-dir''=''/app/wwwroot/MBDC/hadoop/etc/hadoop/'' -); - -USE CATALOG hive; -USE offline_db; --选择库 -set table.sql-dialect=hive; --设置方言 - -CREATE TABLE hive_stream_table ( - user_id STRING, - order_amount DOUBLE -) PARTITIONED BY (dt STRING, hr STRING) STORED AS parquet TBLPROPERTIES ( - ''partition.time-extractor.timestamp-pattern''=''$dt $hr:00:00'', - ''sink.partition-commit.trigger''=''partition-time'', - ''sink.partition-commit.delay''=''1min'', - ''sink.semantic'' = ''exactly-once'', - ''sink.rolling-policy.rollover-interval'' =''1min'', - ''sink.rolling-policy.check-interval''=''1min'', - ''sink.partition-commit.policy.kind''=''metastore,success-file'' -);', '1.14', 0, 1 - , '2022-01-20 17:34:06', '2022-01-20 17:46:41');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (19, 'Module', '建表语句', 'Other', 'create.table.mysql_cdc' - , '创建Mysql_CDC表', 'CREATE TABLE mysql_cdc_table( - cid INT, - sid INT, - cls STRING, - score INT, - PRIMARY KEY (cid) NOT ENFORCED -) WITH ( -''connector'' = ''mysql-cdc'', -''hostname'' = ''127.0.0.1'', -''port'' = ''3306'', -''username'' = ''test'', -''password'' = ''123456'', -''database-name'' = ''test'', -''server-time-zone'' = ''UTC'', -''scan.incremental.snapshot.enabled'' = ''true'', -''debezium.snapshot.mode''=''latest-offset'' ,-- 或者key是scan.startup.mode,initial表示要历史数据,latest-offset表示不要历史数据 -''debezium.datetime.format.date''=''yyyy-MM-dd'', -''debezium.datetime.format.time''=''HH-mm-ss'', -''debezium.datetime.format.datetime''=''yyyy-MM-dd 
HH-mm-ss'', -''debezium.datetime.format.timestamp''=''yyyy-MM-dd HH-mm-ss'', -''debezium.datetime.format.timestamp.zone''=''UTC+8'', -''table-name'' = ''mysql_cdc_table'');', '1.14', 0, 1 - , '2022-01-20 17:49:14', '2022-01-20 17:52:20');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (20, 'Module', '建表语句', 'Other', 'create.table.hudi' - , '创建hudi表', 'CREATE TABLE hudi_table -( - `goods_order_id` bigint COMMENT ''自增主键id'', - `goods_order_uid` string COMMENT ''订单uid'', - `customer_uid` string COMMENT ''客户uid'', - `customer_name` string COMMENT ''客户name'', - `create_time` timestamp(3) COMMENT ''创建时间'', - `update_time` timestamp(3) COMMENT ''更新时间'', - `create_by` string COMMENT ''创建人uid(唯一标识)'', - `update_by` string COMMENT ''更新人uid(唯一标识)'', - PRIMARY KEY (goods_order_id) NOT ENFORCED -) COMMENT ''hudi_table'' -WITH ( -''connector'' = ''hudi'', -''path'' = ''hdfs://cluster1/data/bizdata/cdc/mysql/order/goods_order'', -- 路径会自动创建 -''hoodie.datasource.write.recordkey.field'' = ''goods_order_id'', -- 主键 -''write.precombine.field'' = ''update_time'', -- 相同的键值时,取此字段最大值,默认ts字段 -''read.streaming.skip_compaction'' = ''true'', -- 避免重复消费问题 -''write.bucket_assign.tasks'' = ''2'', -- 并发写的 bucekt 数 -''write.tasks'' = ''2'', -''compaction.tasks'' = ''1'', -''write.operation'' = ''upsert'', -- UPSERT(插入更新)\\INSERT(插入)\\BULK_INSERT(批插入)(upsert性能会低些,不适合埋点上报) -''write.rate.limit'' = ''20000'', -- 限制每秒多少条 -''table.type'' = ''COPY_ON_WRITE'', -- 默认COPY_ON_WRITE , -''compaction.async.enabled'' = ''true'', -- 在线压缩 -''compaction.trigger.strategy'' = ''num_or_time'', -- 按次数压缩 -''compaction.delta_commits'' = ''20'', -- 默认为5 -''compaction.delta_seconds'' = ''60'', -- 默认为1小时 -''hive_sync.enable'' = ''true'', -- 启用hive同步 -''hive_sync.mode'' = ''hms'', -- 启用hive hms同步,默认jdbc -''hive_sync.metastore.uris'' = ''thrift://cdh2.vision.com:9083'', -- required, metastore的端口 -''hive_sync.jdbc_url'' = ''jdbc:hive2://cdh1.vision.com:10000'', -- required, hiveServer地址 -''hive_sync.table'' = ''order_mysql_goods_order'', -- required, hive 新建的表名 会自动同步hudi的表结构和数据到hive -''hive_sync.db'' = ''cdc_ods'', -- required, hive 新建的数据库名 -''hive_sync.username'' = ''hive'', -- required, HMS 用户名 -''hive_sync.password'' = ''123456'', -- required, HMS 密码 -''hive_sync.skip_ro_suffix'' = ''true'' -- 去除ro后缀 -);', '1.14', 0, 1 - , '2022-01-20 17:56:50', '2022-01-20 17:56:50');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (21, 'Function', '内置函数', '比较函数', 'value1 <> value2' - , '如果value1不等于value2 返回true; 如果value1或value2为NULL,则返回UNKNOWN 。', '${1:} <> ${2:}', '1.12', 4, 1 - , '2021-02-22 10:05:38', '2021-03-11 09:58:48');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (22, 'Function', '内置函数', '比较函数', 'value1 > value2' - , '如果value1大于value2 返回true; 如果value1或value2为NULL,则返回UNKNOWN 。', '${1:} > ${2:}', '1.12', 2, 1 - , '2021-02-22 14:37:58', '2021-03-10 11:58:06');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (23, 'Function', '内置函数', '比较函数', 'value1 >= value2' - , '如果value1大于或等于value2 返回true; 
如果value1或value2为NULL,则返回UNKNOWN 。', '${1:} >= ${2:}', '1.12', 2, 1 - , '2021-02-22 14:38:52', '2022-03-29 19:05:54');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (24, 'Function', '内置函数', '比较函数', 'value1 < value2' - , '如果value1小于value2 返回true; 如果value1或value2为NULL,则返回UNKNOWN 。', '${1:} < ${2:}', '1.12', 0, 1 - , '2021-02-22 14:39:15', '2022-03-29 19:04:58');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (25, 'Function', '内置函数', '比较函数', 'value1 <= value2' - , '如果value1小于或等于value2 返回true; 如果value1或value2为NULL,则返回UNKNOWN 。', '${1:} <= ${2:}', '1.12', 0, 1 - , '2021-02-22 14:39:40', '2022-03-29 19:05:17');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (26, 'Function', '内置函数', '比较函数', 'value IS NULL' - , '如果value为NULL,则返回TRUE 。', '${1:} IS NULL', '1.12', 2, 1 - , '2021-02-22 14:40:39', '2021-03-10 11:57:51');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (27, 'Function', '内置函数', '比较函数', 'value IS NOT NULL' - , '如果value不为NULL,则返回TRUE 。', '${1:} IS NOT NULL', '1.12', 0, 1 - , '2021-02-22 14:41:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (28, 'Function', '内置函数', '比较函数', 'value1 IS DISTINCT FROM value2' - , '如果两个值不相等则返回TRUE。NULL值在这里被视为相同的值。', '${1:} IS DISTINCT FROM ${2:}', '1.12', 0, 1 - , '2021-02-22 14:42:39', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (29, 'Function', '内置函数', '比较函数', 'value1 IS NOT DISTINCT FROM value2' - , '如果两个值相等则返回TRUE。NULL值在这里被视为相同的值。', '${1:} IS NOT DISTINCT FROM ${2:}', '1.12', 0, 1 - , '2021-02-22 14:43:23', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (30, 'Function', '内置函数', '比较函数', 'value1 BETWEEN [ ASYMMETRIC | SYMMETRIC ] value2 AND value3' - , '如果value1大于或等于value2和小于或等于value3 返回true', '${1:} BETWEEN ${2:} AND ${3:}', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (31, 'Function', '内置函数', '比较函数', 'value1 NOT BETWEEN [ ASYMMETRIC | SYMMETRIC ] value2 AND value3' - , '如果value1小于value2或大于value3 返回true', '${1:} NOT BETWEEN ${2:} AND ${3:}', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (32, 'Function', '内置函数', '比较函数', 'string1 LIKE string2 [ ESCAPE char ]' - , '如果STRING1匹配模式STRING2,则返回TRUE 
;如果STRING1或STRING2为NULL,则返回UNKNOWN 。', '${1:} LIKE ${2:}', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (33, 'Function', '内置函数', '比较函数', 'string1 NOT LIKE string2 [ ESCAPE char ]' - , '如果STRING1不匹配模式STRING2,则返回TRUE ;如果STRING1或STRING2为NULL,则返回UNKNOWN 。', '${1:} NOT LIKE ${2:}', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (34, 'Function', '内置函数', '比较函数', 'string1 SIMILAR TO string2 [ ESCAPE char ]' - , '如果STRING1与SQL正则表达式STRING2匹配,则返回TRUE ;如果STRING1或STRING2为NULL,则返回UNKNOWN 。', '${1:} SIMILAR TO ${2:}', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-03-10 11:57:28');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (35, 'Function', '内置函数', '比较函数', 'string1 NOT SIMILAR TO string2 [ ESCAPE char ]' - , '如果STRING1与SQL正则表达式STRING2不匹配,则返回TRUE ;如果STRING1或STRING2为NULL,则返回UNKNOWN 。', '${1:} NOT SIMILAR TO ${2:}', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (36, 'Function', '内置函数', '比较函数', 'value1 IN (value2 [, value3]* )' - , '如果value1存在于给定列表(value2,value3,...)中,则返回TRUE 。 - -当(value2,value3,...)包含NULL,如果可以找到该元素,则返回TRUE,否则返回UNKNOWN。 - -如果value1为NULL,则始终返回UNKNOWN 。', '${1:} IN (${2:} )', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (37, 'Function', '内置函数', '比较函数', 'value1 NOT IN (value2 [, value3]* )' - , '如果value1不存在于给定列表(value2,value3,...)中,则返回TRUE 。 - -当(value2,value3,...)包含NULL,如果可以找到该元素,则返回TRUE,否则返回UNKNOWN。 - -如果value1为NULL,则始终返回UNKNOWN 。', '${1:} NOT IN (${2:})', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (38, 'Function', '内置函数', '比较函数', 'EXISTS (sub-query)' - , '如果value存在于子查询中,则返回TRUE。', 'EXISTS (${1:})', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (39, 'Function', '内置函数', '比较函数', 'value IN (sub-query)' - , '如果value存在于子查询中,则返回TRUE。', '${1:} IN (${2:})', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (40, 'Function', '内置函数', '比较函数', 'value NOT IN (sub-query)' - , '如果value不存在于子查询中,则返回TRUE。', '${1:} NOT IN (${2:})', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, 
`category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (41, 'Function', '内置函数', '逻辑函数', 'boolean1 OR boolean2' - , '如果BOOLEAN1为TRUE或BOOLEAN2为TRUE,则返回TRUE。支持三值逻辑。 - -例如,true || Null(Types.BOOLEAN)返回TRUE。', '${1:} OR ${2:}', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (42, 'Function', '内置函数', '逻辑函数', 'boolean1 AND boolean2' - , '如果BOOLEAN1和BOOLEAN2均为TRUE,则返回TRUE。支持三值逻辑。 - -例如,true && Null(Types.BOOLEAN)返回未知。', '${1:} AND ${2:}', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (43, 'Function', '内置函数', '逻辑函数', 'NOT boolean' - , '如果BOOLEAN为FALSE,则返回TRUE ;如果BOOLEAN为TRUE,则返回FALSE 。 - -如果BOOLEAN为UNKNOWN,则返回UNKNOWN。', 'NOT ${1:} ', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (44, 'Function', '内置函数', '逻辑函数', 'boolean IS FALSE' - , '如果BOOLEAN为FALSE,则返回TRUE ;如果BOOLEAN为TRUE或UNKNOWN,则返回FALSE 。', '${1:} IS FALSE', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (45, 'Function', '内置函数', '逻辑函数', 'boolean IS NOT FALSE' - , '如果BOOLEAN为TRUE或UNKNOWN,则返回TRUE ;如果BOOLEAN为FALSE,则返回FALSE。', '${1:} IS NOT FALSE', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (46, 'Function', '内置函数', '逻辑函数', 'boolean IS TRUE' - , '如果BOOLEAN为TRUE,则返回TRUE;如果BOOLEAN为FALSE或UNKNOWN,则返回FALSE 。', '${1:} IS TRUE', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (47, 'Function', '内置函数', '逻辑函数', 'boolean IS NOT TRUE' - , '如果BOOLEAN为FALSE或UNKNOWN,则返回TRUE ;如果BOOLEAN为TRUE,则返回FALSE 。', '${1:} IS NOT TRUE', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (48, 'Function', '内置函数', '逻辑函数', 'boolean IS UNKNOWN' - , '如果BOOLEAN为UNKNOWN,则返回TRUE ;如果BOOLEAN为TRUE或FALSE,则返回FALSE 。', '${1:} IS UNKNOWN', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (49, 'Function', '内置函数', '逻辑函数', 'boolean IS NOT UNKNOWN' - , '如果BOOLEAN为TRUE或FALSE,则返回TRUE ;如果BOOLEAN为UNKNOWN,则返回FALSE 。', '${1:} IS NOT UNKNOWN', '1.12', 0, 1 - , '2021-02-22 14:44:26', 
'2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (50, 'Function', '内置函数', '算术函数', '+ numeric' - , '返回NUMERIC。', '+ ${1:} ', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (51, 'Function', '内置函数', '算术函数', '- numeric' - , '返回负数NUMERIC。', '- ${1:} ', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (52, 'Function', '内置函数', '算术函数', 'numeric1 + numeric2' - , '返回NUMERIC1加NUMERIC2。', '${1:} + ${2:} ', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (53, 'Function', '内置函数', '算术函数', 'numeric1 - numeric2' - , '返回NUMERIC1减去NUMERIC2。', '${1:} - ${2:} ', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (54, 'Function', '内置函数', '算术函数', 'numeric1 * numeric2' - , '返回NUMERIC1乘以NUMERIC2。', '${1:} * ${2:} ', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (55, 'Function', '内置函数', '算术函数', 'numeric1 / numeric2' - , '返回NUMERIC1除以NUMERIC2。', '${1:} / ${2:} ', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (56, 'Function', '内置函数', '算术函数', 'numeric1 % numeric2' - , '返回NUMERIC1除以NUMERIC2的余数(模)。仅当numeric1为负数时,结果为负数。', '${1:} % ${2:} ', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (57, 'Function', '内置函数', '算术函数', 'POWER(numeric1, numeric2)' - , '返回NUMERIC1的NUMERIC2 次幂。', 'POWER(${1:} , ${2:})', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (58, 'Function', '内置函数', '算术函数', 'ABS(numeric)' - , '返回NUMERIC的绝对值。', 'ABS(${1:})', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (59, 'Function', '内置函数', '算术函数', 'MOD(numeric1, numeric2)' - , '返回numeric1除以numeric2的余数(模)。只有当numeric1为负数时,结果才为负数', 'MOD(${1:} , ${2:} )', '1.12', 0, 1 - , 
'2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (60, 'Function', '内置函数', '算术函数', 'SQRT(numeric)' - , '返回NUMERIC的平方根。', 'SQRT(${1:})', '1.12', 0, 1 - , '2021-02-22 14:44:26', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (61, 'Function', '内置函数', '算术函数', 'LN(numeric)' - , '返回NUMERIC的自然对数(以e为底)。', 'LN(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (62, 'Function', '内置函数', '算术函数', 'LOG10(numeric)' - , '返回NUMERIC的以10为底的对数。', 'LOG10(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (63, 'Function', '内置函数', '算术函数', 'LOG2(numeric)' - , '返回NUMERIC的以2为底的对数。', 'LOG2(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (64, 'Function', '内置函数', '算术函数', 'EXP(numeric)' - , '返回e 的 NUMERIC 次幂。', 'EXP(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (65, 'Function', '内置函数', '算术函数', 'FLOOR(numeric)' - , '向下舍入NUMERIC,并返回小于或等于NUMERIC的最大整数。', 'FLOOR(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (66, 'Function', '内置函数', '算术函数', 'SIN(numeric)' - , '返回NUMERIC的正弦值。', 'SIN(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (67, 'Function', '内置函数', '算术函数', 'SINH(numeric)' - , '返回NUMERIC的双曲正弦值。 - -返回类型为DOUBLE。', 'SINH(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (68, 'Function', '内置函数', '算术函数', 'COS(numeric)' - , '返回NUMERIC的余弦值。', 'COS(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (69, 'Function', '内置函数', '算术函数', 'TAN(numeric)' - , '返回NUMERIC的正切。', 'TAN(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, 
`type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (70, 'Function', '内置函数', '算术函数', 'TANH(numeric)' - , '返回NUMERIC的双曲正切值。 - -返回类型为DOUBLE。', 'TANH(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (71, 'Function', '内置函数', '算术函数', 'COT(numeric)' - , '返回NUMERIC的余切。', 'COT(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (72, 'Function', '内置函数', '算术函数', 'ASIN(numeric)' - , '返回NUMERIC的反正弦值。', 'ASIN(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (73, 'Function', '内置函数', '算术函数', 'ACOS(numeric)' - , '返回NUMERIC的反余弦值。', 'ACOS(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (74, 'Function', '内置函数', '算术函数', 'ATAN(numeric)' - , '返回NUMERIC的反正切。', 'ATAN(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (75, 'Function', '内置函数', '算术函数', 'ATAN2(numeric1, numeric2)' - , '返回坐标的反正切(NUMERIC1,NUMERIC2)。', 'ATAN2(${1:}, ${2:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (76, 'Function', '内置函数', '算术函数', 'COSH(numeric)' - , '返回NUMERIC的双曲余弦值。 - -返回值类型为DOUBLE。', 'COSH(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (77, 'Function', '内置函数', '算术函数', 'DEGREES(numeric)' - , '返回弧度NUMERIC的度数表示形式', 'DEGREES(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (78, 'Function', '内置函数', '算术函数', 'RADIANS(numeric)' - , '返回度数NUMERIC的弧度表示。', 'RADIANS(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (79, 'Function', '内置函数', '算术函数', 'SIGN(numeric)' - , '返回NUMERIC的符号。', 'SIGN(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, 
`version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (80, 'Function', '内置函数', '算术函数', 'ROUND(numeric, integer)' - , '返回一个数字,四舍五入为NUMERIC的INT小数位。', 'ROUND(${1:} , ${2:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (81, 'Function', '内置函数', '算术函数', 'PI' - , '返回一个比任何其他值都更接近圆周率的值。', 'PI', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (82, 'Function', '内置函数', '算术函数', 'E()' - , '返回一个比任何其他值都更接近e的值。', 'E()', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (83, 'Function', '内置函数', '算术函数', 'RAND()' - , '返回介于0.0(含)和1.0(不含)之间的伪随机双精度值。', 'RAND()', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (84, 'Function', '内置函数', '算术函数', 'RAND(integer)' - , '返回带有初始种子INTEGER的介于0.0(含)和1.0(不含)之间的伪随机双精度值。 - -如果两个RAND函数具有相同的初始种子,它们将返回相同的数字序列。', 'RAND(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (85, 'Function', '内置函数', '算术函数', 'RAND_INTEGER(integer)' - , '返回介于0(含)和INTEGER(不含)之间的伪随机整数值。', 'RAND_INTEGER(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (86, 'Function', '内置函数', '算术函数', 'RAND_INTEGER(integer1, integer2)' - , '返回介于0(含)和INTEGER2(不含)之间的伪随机整数值,其初始种子为INTEGER1。 - -如果两个randInteger函数具有相同的初始种子和边界,它们将返回相同的数字序列。', 'RAND_INTEGER(${1:} , ${2:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (87, 'Function', '内置函数', '算术函数', 'UUID()' - , '根据RFC 4122 type 4(伪随机生成)UUID返回UUID(通用唯一标识符)字符串 - -(例如,“ 3d3c68f7-f608-473f-b60c-b0c44ad4cc4e”)。使用加密强度高的伪随机数生成器生成UUID。', 'UUID()', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (88, 'Function', '内置函数', '算术函数', 'BIN(integer)' - , '以二进制格式返回INTEGER的字符串表示形式。如果INTEGER为NULL,则返回NULL。 - -例如,4.bin()返回“ 100”并12.bin()返回“ 1100”。', 'BIN(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (89, 'Function', '内置函数', '算术函数', 
'HEX(numeric) -HEX(string)' - , '以十六进制格式返回整数NUMERIC值或STRING的字符串表示形式。如果参数为NULL,则返回NULL。 - -例如,数字20导致“ 14”,数字100导致“ 64”,字符串“ hello,world”导致“ 68656C6C6F2C776F726C64”。', 'HEX(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (90, 'Function', '内置函数', '算术函数', 'TRUNCATE(numeric1, integer2)' - , '返回一个小数点后被截断为integer2位的数字。', 'TRUNCATE(${1:}, ${2:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (91, 'Function', '内置函数', '算术函数', 'PI()' - , '返回π (pi)的值。仅在blink planner中支持。', 'PI()', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (92, 'Function', '内置函数', '算术函数', 'LOG(numeric1)' - , '如果不带参数调用,则返回NUMERIC1的自然对数。当使用参数调用时,将NUMERIC1的对数返回到基数NUMERIC2。 - -注意:当前,NUMERIC1必须大于0,而NUMERIC2必须大于1。', 'LOG(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (93, 'Function', '内置函数', '算术函数', 'LOG(numeric1, numeric2)' - , '如果不带参数调用,则返回NUMERIC1的自然对数。当使用参数调用时,将NUMERIC1的对数返回到基数NUMERIC2。 - -注意:当前,NUMERIC1必须大于0,而NUMERIC2必须大于1。', 'LOG(${1:}, ${2:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (94, 'Function', '内置函数', '算术函数', 'CEIL(numeric)' - , '将NUMERIC向上舍入,并返回大于或等于NUMERIC的最小整数。', 'CEIL(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (95, 'Function', '内置函数', '算术函数', 'CEILING(numeric)' - , '将NUMERIC向上舍入,并返回大于或等于NUMERIC的最小整数。', 'CEILING(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (96, 'Function', '内置函数', '字符串函数', 'string1 || string2' - , '返回string1和string2的连接。', '${1:} || ${2:}', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (97, 'Function', '内置函数', '字符串函数', 'UPPER(string)' - , '以大写形式返回STRING。', 'UPPER(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (98, 'Function', '内置函数', '字符串函数', 'LOWER(string)' - , '以小写形式返回STRING。', 'LOWER(${1:})', '1.12', 0, 1 - , 
'2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (99, 'Function', '内置函数', '字符串函数', 'POSITION(string1 IN string2)' - , '返回STRING1在STRING2中第一次出现的位置(从1开始); - -如果在STRING2中找不到STRING1,则返回0 。', 'POSITION(${1:} IN ${2:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (100, 'Function', '内置函数', '字符串函数', 'TRIM([ BOTH | LEADING | TRAILING ] string1 FROM string2)' - , '返回一个字符串,该字符串从STRING中删除前导和/或结尾字符。', 'TRIM(${1:} FROM ${2:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (101, 'Function', '内置函数', '字符串函数', 'LTRIM(string)' - , '返回一个字符串,该字符串从STRING除去左空格。 - -例如," This is a test String.".ltrim()返回“This is a test String.”。', 'LTRIM(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (102, 'Function', '内置函数', '字符串函数', 'RTRIM(string)' - , '返回一个字符串,该字符串从STRING中删除正确的空格。 - -例如,"This is a test String. ".rtrim()返回“This is a test String.”。', 'RTRIM(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (103, 'Function', '内置函数', '字符串函数', 'REPEAT(string, integer)' - , '返回一个字符串,该字符串重复基本STRING INT次。 - -例如,"This is a test String.".repeat(2)返回“This is a test String.This is a test String.”。', 'REPEAT(${1:}, ${2:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (104, 'Function', '内置函数', '字符串函数', 'REGEXP_REPLACE(string1, string2, string3)' - , '返回字符串STRING1所有匹配正则表达式的子串STRING2连续被替换STRING3。 - -例如,"foobar".regexpReplace("oo|ar", "")返回“ fb”。', 'REGEXP_REPLACE(${1:} , ${2:} , ${3:} )', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (105, 'Function', '内置函数', '字符串函数', 'OVERLAY(string1 PLACING string2 FROM integer1 [ FOR integer2 ])' - , '从位置INT1返回一个字符串,该字符串将STRING1的INT2(默认为STRING2的长度)字符替换为STRING2', 'OVERLAY(${1:} PLACING ${2:} FROM ${3:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (106, 'Function', '内置函数', '字符串函数', 'SUBSTRING(string FROM integer1 [ FOR integer2 ])' - , '返回字符串STRING的子字符串,从位置INT1开始,长度为INT2(默认为结尾)。', 'SUBSTRING${1:} FROM ${2:} )', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; 
-INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (107, 'Function', '内置函数', '字符串函数', 'REPLACE(string1, string2, string3)' - , '返回一个新字符串替换其中出现的所有STRING2与STRING3(非重叠)从STRING1。', 'REPLACE(${1:} , ${2:} , ${3:} )', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (108, 'Function', '内置函数', '字符串函数', 'REGEXP_EXTRACT(string1, string2[, integer])' - , '从STRING1返回一个字符串,该字符串使用指定的正则表达式STRING2和正则表达式匹配组索引INTEGER1提取。', 'REGEXP_EXTRACT(${1:}, ${2:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (109, 'Function', '内置函数', '字符串函数', 'INITCAP(string)' - , '返回一种新形式的STRING,其中每个单词的第一个字符转换为大写,其余字符转换为小写。', 'INITCAP(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (110, 'Function', '内置函数', '字符串函数', 'CONCAT(string1, string2,...)' - , '返回连接STRING1,STRING2,...的字符串。如果任何参数为NULL,则返回NULL。', 'CONCAT(${1:} , ${2:} , ${3:} )', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (111, 'Function', '内置函数', '字符串函数', 'CONCAT_WS(string1, string2, string3,...)' - , '返回一个字符串,会连接STRING2,STRING3,......与分离STRING1。', 'CONCAT_WS(${1:} , ${2:} , ${3:} )', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (112, 'Function', '内置函数', '字符串函数', 'LPAD(string1, integer, string2)' - , '返回一个新字符串,该字符串从STRING1的左侧填充STRING2,长度为INT个字符。', 'LPAD(${1:} , ${2:} , ${3:} )', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (113, 'Function', '内置函数', '字符串函数', 'RPAD(string1, integer, string2)' - , '返回一个新字符串,该字符串从STRING1右侧填充STRING2,长度为INT个字符。', 'RPAD(${1:} , ${2:} , ${3:} )', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (114, 'Function', '内置函数', '字符串函数', 'FROM_BASE64(string)' - , '返回来自STRING的base64解码结果;如果STRING为NULL,则返回null 。', 'FROM_BASE64(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (115, 'Function', '内置函数', '字符串函数', 'TO_BASE64(string)' - , '从STRING返回base64编码的结果;如果STRING为NULL,则返回NULL。', 
'TO_BASE64(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (116, 'Function', '内置函数', '字符串函数', 'ASCII(string)' - , '返回字符串的第一个字符的数值。如果字符串为NULL,则返回NULL。仅在blink planner中支持。', 'ASCII(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (117, 'Function', '内置函数', '字符串函数', 'CHR(integer)' - , '返回与integer在二进制上等价的ASCII字符。如果integer大于255,我们将首先得到integer的模数除以255,并返回模数的CHR。如果integer为NULL,则返回NULL。仅在blink planner中支持。', 'CHR(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (118, 'Function', '内置函数', '字符串函数', 'DECODE(binary, string)' - , '使用提供的字符集(''US-ASCII'', ''ISO-8859-1'', ''UTF-8'', ''UTF-16BE'', ''UTF-16LE'', ''UTF-16''之一)将第一个参数解码为字符串。如果任意一个参数为空,结果也将为空。仅在blink planner中支持。', 'DECODE(${1:}, ${2:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (119, 'Function', '内置函数', '字符串函数', 'ENCODE(string1, string2)' - , '使用提供的string2字符集(''US-ASCII'', ''ISO-8859-1'', ''UTF-8'', ''UTF-16BE'', ''UTF-16LE'', ''UTF-16''之一)将string1编码为二进制。如果任意一个参数为空,结果也将为空。仅在blink planner中支持。', 'ENCODE(${1:}, ${2:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (120, 'Function', '内置函数', '字符串函数', 'INSTR(string1, string2)' - , '返回string2在string1中第一次出现的位置。如果任何参数为空,则返回NULL。仅在blink planner中支持。', 'INSTR(${1:}, ${2:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (121, 'Function', '内置函数', '字符串函数', 'LEFT(string, integer)' - , '返回字符串中最左边的整数字符。如果整数为负,则返回空字符串。如果任何参数为NULL,则返回NULL。仅在blink planner中支持。', 'LEFT(${1:}, ${2:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (122, 'Function', '内置函数', '字符串函数', 'RIGHT(string, integer)' - , '返回字符串中最右边的整数字符。如果整数为负,则返回空字符串。如果任何参数为NULL,则返回NULL。仅在blink planner中支持。', 'RIGHT(${1:}, ${2:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (123, 'Function', '内置函数', '字符串函数', 'LOCATE(string1, string2[, integer])' - , '返回string1在string2中的位置整数之后第一次出现的位置。如果没有找到,返回0。如果任何参数为NULL,则返回NULL仅在blink planner中支持。', 'LOCATE(${1:}, ${2:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', 
'2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (124, 'Function', '内置函数', '字符串函数', 'PARSE_URL(string1, string2[, string3])' - , '从URL返回指定的部分。string2的有效值包括''HOST'', ''PATH'', ''QUERY'', ''REF'', ''PROTOCOL'', ''AUTHORITY'', ''FILE''和''USERINFO''。如果任何参数为NULL,则返回NULL。仅在blink planner中支持。', 'PARSE_URL(${1:} , ${2:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (125, 'Function', '内置函数', '字符串函数', 'REGEXP(string1, string2)' - , '如果string1的任何子字符串(可能为空)与Java正则表达式string2匹配,则返回TRUE,否则返回FALSE。如果任何参数为NULL,则返回NULL。仅在blink planner中支持。', 'REGEXP(${1:}, ${2:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (126, 'Function', '内置函数', '字符串函数', 'REVERSE(string)' - , '返回反向字符串。如果字符串为NULL,则返回NULL仅在blink planner中支持。', 'REVERSE(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (127, 'Function', '内置函数', '字符串函数', 'SPLIT_INDEX(string1, string2, integer1)' - , '通过分隔符string2拆分string1,返回拆分字符串的整数(从零开始)字符串。如果整数为负,返回NULL。如果任何参数为NULL,则返回NULL。仅在blink planner中支持。', 'SPLIT_INDEX(${1:}, ${2:} , ${3:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (128, 'Function', '内置函数', '字符串函数', 'STR_TO_MAP(string1[, string2, string3]])' - , '使用分隔符将string1分割成键/值对后返回一个映射。string2是pair分隔符,默认为'',''。string3是键值分隔符,默认为''=''。仅在blink planner中支持。', 'STR_TO_MAP(${1:})', '1.12', 4, 1 - , '2021-02-22 15:29:35', '2021-05-20 19:59:50');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (129, 'Function', '内置函数', '字符串函数', 'SUBSTR(string[, integer1[, integer2]])' - , '返回一个字符串的子字符串,从位置integer1开始,长度为integer2(默认到末尾)。仅在blink planner中支持。', 'SUBSTR(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (130, 'Function', '内置函数', '字符串函数', 'CHAR_LENGTH(string)' - , '返回STRING中的字符数。', 'CHAR_LENGTH(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (131, 'Function', '内置函数', '字符串函数', 'CHARACTER_LENGTH(string)' - , '返回STRING中的字符数。', 'CHARACTER_LENGTH(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, 
`version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (132, 'Function', '内置函数', '时间函数', 'DATE string' - , '返回以“ yyyy-MM-dd”形式从STRING解析的SQL日期。', 'DATE(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (133, 'Function', '内置函数', '时间函数', 'TIME string' - , '返回以“ HH:mm:ss”的形式从STRING解析的SQL时间。', 'TIME(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (134, 'Function', '内置函数', '时间函数', 'TIMESTAMP string' - , '返回从STRING解析的SQL时间戳,格式为“ yyyy-MM-dd HH:mm:ss [.SSS]”', 'TIMESTAMP(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (135, 'Function', '内置函数', '时间函数', 'INTERVAL string range' - , '解析“dd hh:mm:ss”形式的区间字符串。fff表示毫秒间隔,yyyy-mm表示月间隔。间隔范围可以是天、分钟、天到小时或天到秒,以毫秒为间隔;年或年到月的间隔。', 'INTERVAL ${1:} range', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (136, 'Function', '内置函数', '时间函数', 'CURRENT_DATE' - , '返回UTC时区中的当前SQL日期。', 'CURRENT_DATE', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (137, 'Function', '内置函数', '时间函数', 'CURRENT_TIME' - , '返回UTC时区的当前SQL时间。', 'CURRENT_TIME', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (138, 'Function', '内置函数', '时间函数', 'CURRENT_TIMESTAMP' - , '返回UTC时区内的当前SQL时间戳。', 'CURRENT_TIMESTAMP', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (139, 'Function', '内置函数', '时间函数', 'LOCALTIME' - , '返回本地时区的当前SQL时间。', 'LOCALTIME', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (140, 'Function', '内置函数', '时间函数', 'LOCALTIMESTAMP' - , '返回本地时区的当前SQL时间戳。', 'LOCALTIMESTAMP', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (141, 'Function', '内置函数', '时间函数', 'EXTRACT(timeintervalunit FROM temporal)' - , '返回从时域的timeintervalunit部分提取的长值。', 'EXTRACT(${1:} FROM ${2:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO 
`dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (142, 'Function', '内置函数', '时间函数', 'YEAR(date)' - , '返回SQL date日期的年份。等价于EXTRACT(YEAR FROM date)。', 'YEAR(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (143, 'Function', '内置函数', '时间函数', 'QUARTER(date)' - , '从SQL date date返回一年中的季度(1到4之间的整数)。相当于EXTRACT(从日期起四分之一)。', 'QUARTER(${1:})', '1.12', 0, 1 - , '2021-02-22 15:29:35', '2021-02-22 15:28:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (144, 'Function', '内置函数', '时间函数', 'MONTH(date)' - , '返回SQL date date中的某月(1到12之间的整数)。等价于EXTRACT(MONTH FROM date)。', 'MONTH(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (145, 'Function', '内置函数', '时间函数', 'WEEK(date)' - , '从SQL date date返回一年中的某个星期(1到53之间的整数)。相当于EXTRACT(从日期开始的星期)。', 'WEEK(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (146, 'Function', '内置函数', '时间函数', 'DAYOFYEAR(date)' - , '返回SQL date date中的某一天(1到366之间的整数)。相当于EXTRACT(DOY FROM date)。', 'DAYOFYEAR(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (147, 'Function', '内置函数', '时间函数', 'DAYOFMONTH(date)' - , '从SQL date date返回一个月的哪一天(1到31之间的整数)。相当于EXTRACT(DAY FROM date)。', 'DAYOFMONTH(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (148, 'Function', '内置函数', '时间函数', 'DAYOFWEEK(date)' - , '返回星期几(1到7之间的整数;星期日= 1)从SQL日期日期。相当于提取(道指从日期)。', 'DAYOFWEEK(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (149, 'Function', '内置函数', '时间函数', 'HOUR(timestamp)' - , '从SQL timestamp timestamp返回一天中的小时(0到23之间的整数)。相当于EXTRACT(HOUR FROM timestamp)。', 'HOUR(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (150, 'Function', '内置函数', '时间函数', 'MINUTE(timestamp)' - , '从SQL timestamp timestamp返回一小时的分钟(0到59之间的整数)。相当于EXTRACT(分钟从时间戳)。', 'MINUTE(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` 
- , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (151, 'Function', '内置函数', '时间函数', 'SECOND(timestamp)' - , '从SQL时间戳返回一分钟中的秒(0到59之间的整数)。等价于EXTRACT(从时间戳开始倒数第二)。', 'SECOND(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (152, 'Function', '内置函数', '时间函数', 'FLOOR(timepoint TO timeintervalunit)' - , '返回一个将timepoint舍入到时间单位timeintervalunit的值。', 'FLOOR(${1:} TO ${2:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (153, 'Function', '内置函数', '时间函数', 'CEIL(timepoint TO timeintervalunit)' - , '返回一个将timepoint舍入到时间单位timeintervalunit的值。', 'CEIL(${1:} TO ${2:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (154, 'Function', '内置函数', '时间函数', '(timepoint1, temporal1) OVERLAPS (timepoint2, temporal2)' - , '如果(timepoint1, temporal1)和(timepoint2, temporal2)定义的两个时间间隔重叠,则返回TRUE。时间值可以是时间点或时间间隔。', '(${1:} , ${1:}) OVERLAPS (${2:} , ${2:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (155, 'Function', '内置函数', '时间函数', 'DATE_FORMAT(timestamp, string)' - , '注意这个功能有严重的错误,现在不应该使用。请实现一个自定义的UDF,或者使用EXTRACT作为解决方案。', 'DATE_FORMAT(${1:}, ''yyyy-MM-dd'')', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (156, 'Function', '内置函数', '时间函数', 'TIMESTAMPADD(timeintervalunit, interval, timepoint)' - , '返回一个新的时间值,该值将一个(带符号的)整数间隔添加到时间点。间隔的单位由unit参数给出,它应该是以下值之一:秒、分、小时、日、周、月、季度或年。', 'TIMESTAMPADD(${1:} , ${2:} , ${3:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (157, 'Function', '内置函数', '时间函数', 'TIMESTAMPDIFF(timepointunit, timepoint1, timepoint2)' - , '返回timepointunit在timepoint1和timepoint2之间的(带符号)数。间隔的单位由第一个参数给出,它应该是以下值之一:秒、分、小时、日、月或年。', 'TIMESTAMPDIFF(${1:} , ${2:} , ${3:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (158, 'Function', '内置函数', '时间函数', 'CONVERT_TZ(string1, string2, string3)' - , '将时区string2中的datetime string1(默认ISO时间戳格式''yyyy-MM-dd HH:mm:ss'')转换为时区string3。时区的格式可以是缩写,如“PST”;可以是全名,如“America/Los_Angeles”;或者是自定义ID,如“GMT-8:00”。仅在blink planner中支持。', 'CONVERT_TZ(${1:} , ${2:} , ${3:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, 
`name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (159, 'Function', '内置函数', '时间函数', 'FROM_UNIXTIME(numeric[, string])' - , '以字符串格式返回数值参数的表示形式(默认为''yyyy-MM-dd HH:mm:ss'')。numeric是一个内部时间戳值,表示从UTC ''1970-01-01 00:00:00''开始的秒数,例如UNIX_TIMESTAMP()函数生成的时间戳。返回值用会话时区表示(在TableConfig中指定)。仅在blink planner中支持。', 'FROM_UNIXTIME(${1:} )', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (160, 'Function', '内置函数', '时间函数', 'UNIX_TIMESTAMP()' - , '获取当前Unix时间戳(以秒为单位)。仅在blink planner中支持。', 'UNIX_TIMESTAMP()', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (161, 'Function', '内置函数', '时间函数', 'UNIX_TIMESTAMP(string1[, string2])' - , '转换日期时间字符串string1,格式为string2(缺省为yyyy-MM-dd HH:mm:ss,如果没有指定)为Unix时间戳(以秒为单位),使用表配置中指定的时区。仅在blink planner中支持。', 'UNIX_TIMESTAMP(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (162, 'Function', '内置函数', '时间函数', 'TO_DATE(string1[, string2])' - , '将格式为string2的日期字符串string1(默认为''yyyy-MM-dd'')转换为日期。仅在blink planner中支持。', 'TO_DATE(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (163, 'Function', '内置函数', '时间函数', 'TO_TIMESTAMP(string1[, string2])' - , '将会话时区(由TableConfig指定)下的日期时间字符串string1转换为时间戳,格式为string2(默认为''yyyy-MM-dd HH:mm:ss'')。仅在blink planner中支持。', 'TO_TIMESTAMP(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (164, 'Function', '内置函数', '时间函数', 'NOW()' - , '返回UTC时区内的当前SQL时间戳。仅在blink planner中支持。', 'NOW()', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (165, 'Function', '内置函数', '条件函数', 'CASE value -WHEN value1_1 [, value1_2 ]* THEN result1 -[ WHEN value2_1 [, value2_2 ]* THEN result2 ]* -[ ELSE resultZ ] -END' - , '当第一个时间值包含在(valueX_1, valueX_2,…)中时,返回resultX。如果没有匹配的值,则返回resultZ,否则返回NULL。', 'CASE ${1:} - WHEN ${2:} THEN ${3:} - ELSE ${4:} -END AS ${5:}', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (166, 'Function', '内置函数', '条件函数', 'CASE -WHEN condition1 THEN result1 -[ WHEN condition2 THEN result2 ]* -[ ELSE resultZ ] -END' - , '当第一个条件满足时返回resultX。当不满足任何条件时,如果提供了resultZ则返回resultZ,否则返回NULL。', 'CASE WHEN ${1:} THEN ${2:} - ELSE ${3:} -END AS ${4:}', '1.12', 0, 1 - , '2021-02-22 15:46:48', 
'2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (167, 'Function', '内置函数', '条件函数', 'NULLIF(value1, value2)' - , '如果value1等于value2,则返回NULL;否则返回value1。', 'NULLIF(${1:}, ${2:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (168, 'Function', '内置函数', '条件函数', 'COALESCE(value1, value2 [, value3 ]* )' - , '返回value1, value2, ....中的第一个非空值', 'COALESCE(${1:} )', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (169, 'Function', '内置函数', '条件函数', 'IF(condition, true_value, false_value)' - , '如果条件满足则返回true值,否则返回false值。仅在blink planner中支持。', 'IF((${1:}, ${2:}, ${3:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (170, 'Function', '内置函数', '条件函数', 'IS_ALPHA(string)' - , '如果字符串中所有字符都是字母则返回true,否则返回false。仅在blink planner中支持。', 'IS_ALPHA(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (171, 'Function', '内置函数', '条件函数', 'IS_DECIMAL(string)' - , '如果字符串可以被解析为有效的数字则返回true,否则返回false。仅在blink planner中支持。', 'IS_DECIMAL(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (172, 'Function', '内置函数', '条件函数', 'IS_DIGIT(string)' - , '如果字符串中所有字符都是数字则返回true,否则返回false。仅在blink planner中支持。', 'IS_DIGIT(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (173, 'Function', '内置函数', '类型转换函数功能', 'CAST(value AS type)' - , '返回一个要转换为type类型的新值。', 'CAST(${1:} AS ${2:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (174, 'Function', '内置函数', 'Collection 函数', 'CARDINALITY(array)' - , '返回数组中元素的数量。', 'CARDINALITY(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (175, 'Function', '内置函数', 'Collection 函数', 'array ‘[’ integer ‘]’' - , '返回数组中位于整数位置的元素。索引从1开始。', 'array[${1:}]', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, 
`fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (176, 'Function', '内置函数', 'Collection 函数', 'ELEMENT(array)' - , '返回数组的唯一元素(其基数应为1);如果数组为空,则返回NULL。如果数组有多个元素,则抛出异常。', 'ELEMENT(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (177, 'Function', '内置函数', 'Collection 函数', 'CARDINALITY(map)' - , '返回map中的条目数。', 'CARDINALITY(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (178, 'Function', '内置函数', 'Collection 函数', 'map ‘[’ value ‘]’' - , '返回map中key value指定的值。', 'map[${1:}]', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (179, 'Function', '内置函数', 'Value Construction函数', 'ARRAY ‘[’ value1 [, value2 ]* ‘]’' - , '返回一个由一系列值(value1, value2,…)创建的数组。', 'ARRAY[ ${1:} ]', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (180, 'Function', '内置函数', 'Value Construction函数', 'MAP ‘[’ value1, value2 [, value3, value4 ]* ‘]’' - , '返回一个从键值对列表((value1, value2), (value3, value4),…)创建的映射。', 'MAP[ ${1:} ]', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (181, 'Function', '内置函数', 'Value Construction函数', 'implicit constructor with parenthesis -(value1 [, value2]*)' - , '返回从值列表(value1, value2,…)创建的行。', '(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (182, 'Function', '内置函数', 'Value Construction函数', 'explicit ROW constructor -ROW(value1 [, value2]*)' - , '返回从值列表(value1, value2,…)创建的行。', 'ROW(${1:}) ', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (183, 'Function', '内置函数', 'Value Access函数', 'tableName.compositeType.field' - , '按名称从Flink复合类型(例如,Tuple, POJO)中返回一个字段的值。', 'tableName.compositeType.field', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (184, 'Function', '内置函数', 'Value Access函数', 'tableName.compositeType.*' - , '返回Flink复合类型(例如,Tuple, POJO)的平面表示,它将每个直接子类型转换为一个单独的字段。在大多数情况下,平面表示的字段的名称与原始字段类似,但使用了$分隔符(例如,mypojo$mytuple$f0)。', 'tableName.compositeType.*', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO 
`dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (185, 'Function', '内置函数', '分组函数', 'GROUP_ID()' - , '返回唯一标识分组键组合的整数', 'GROUP_ID()', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (186, 'Function', '内置函数', '分组函数', 'GROUPING(expression1 [, expression2]* ) -GROUPING_ID(expression1 [, expression2]* )' - , '返回给定分组表达式的位向量。', 'GROUPING(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (187, 'Function', '内置函数', 'hash函数', 'MD5(string)' - , '以32位十六进制数字的字符串形式返回string的MD5哈希值;如果字符串为NULL,则返回NULL。', 'MD5(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (188, 'Function', '内置函数', 'hash函数', 'SHA1(string)' - , '返回字符串的SHA-1散列,作为一个由40个十六进制数字组成的字符串;如果字符串为NULL,则返回NULL', 'SHA1(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (189, 'Function', '内置函数', 'hash函数', 'SHA224(string)' - , '以56位十六进制数字的字符串形式返回字符串的SHA-224散列;如果字符串为NULL,则返回NULL。', 'SHA224(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (190, 'Function', '内置函数', 'hash函数', 'SHA256(string)' - , '以64位十六进制数字的字符串形式返回字符串的SHA-256散列;如果字符串为NULL,则返回NULL。', 'SHA256(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (191, 'Function', '内置函数', 'hash函数', 'SHA384(string)' - , '以96个十六进制数字的字符串形式返回string的SHA-384散列;如果字符串为NULL,则返回NULL。', 'SHA384(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (192, 'Function', '内置函数', 'hash函数', 'SHA512(string)' - , '以128位十六进制数字的字符串形式返回字符串的SHA-512散列;如果字符串为NULL,则返回NULL。', 'SHA512(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (193, 'Function', '内置函数', 'hash函数', 'SHA2(string, hashLength)' - , '使用SHA-2哈希函数族(SHA-224、SHA-256、SHA-384或SHA-512)返回哈希值。第一个参数string是要散列的字符串,第二个参数hashLength是结果的位长度(224、256、384或512)。如果string或hashLength为NULL,则返回NULL。', 'SHA2(${1:}, ${2:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO 
`dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (194, 'Function', '内置函数', '聚合函数', 'COUNT([ ALL ] expression | DISTINCT expression1 [, expression2]*)' - , '默认情况下或使用ALL时,返回表达式不为空的输入行数。对每个值的唯一实例使用DISTINCT。', 'COUNT( DISTINCT ${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (195, 'Function', '内置函数', '聚合函数', 'COUNT(*) -COUNT(1)' - , '返回输入行数。', 'COUNT(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (196, 'Function', '内置函数', '聚合函数', 'AVG([ ALL | DISTINCT ] expression)' - , '默认情况下,或使用关键字ALL,返回表达式在所有输入行中的平均值(算术平均值)。对每个值的唯一实例使用DISTINCT。', 'AVG(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (197, 'Function', '内置函数', '聚合函数', 'SUM([ ALL | DISTINCT ] expression)' - , '默认情况下,或使用关键字ALL,返回所有输入行表达式的和。对每个值的唯一实例使用DISTINCT。', 'SUM(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (198, 'Function', '内置函数', '聚合函数', 'MAX([ ALL | DISTINCT ] expression)' - , '默认情况下或使用关键字ALL,返回表达式在所有输入行中的最大值。对每个值的唯一实例使用DISTINCT。', 'MAX(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (199, 'Function', '内置函数', '聚合函数', 'MIN([ ALL | DISTINCT ] expression)' - , '默认情况下,或使用关键字ALL,返回表达式在所有输入行中的最小值。对每个值的唯一实例使用DISTINCT。', 'MIN(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (200, 'Function', '内置函数', '聚合函数', 'STDDEV_POP([ ALL | DISTINCT ] expression)' - , '默认情况下,或使用关键字ALL,返回表达式在所有输入行中的总体标准差。对每个值的唯一实例使用DISTINCT。', 'STDDEV_POP(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (201, 'Function', '内置函数', '聚合函数', 'STDDEV_SAMP([ ALL | DISTINCT ] expression)' - , '默认情况下或使用关键字ALL时,返回表达式在所有输入行中的样本标准差。对每个值的唯一实例使用DISTINCT。', 'STDDEV_SAMP(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (202, 'Function', '内置函数', '聚合函数', 'VAR_POP([ ALL | DISTINCT ] expression)' - , '默认情况下,或使用关键字ALL,返回表达式在所有输入行中的总体方差(总体标准差的平方)。对每个值的唯一实例使用DISTINCT。', 'VAR_POP(${1:})', 
'1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (203, 'Function', '内置函数', '聚合函数', 'VAR_SAMP([ ALL | DISTINCT ] expression)' - , '默认情况下,或使用关键字ALL,返回表达式在所有输入行中的样本方差(样本标准差的平方)。对每个值的唯一实例使用DISTINCT。', 'VAR_SAMP(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (204, 'Function', '内置函数', '聚合函数', 'COLLECT([ ALL | DISTINCT ] expression)' - , '默认情况下,或使用关键字ALL,跨所有输入行返回表达式的多集。空值将被忽略。对每个值的唯一实例使用DISTINCT。', 'COLLECT(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (205, 'Function', '内置函数', '聚合函数', 'VARIANCE([ ALL | DISTINCT ] expression)' - , 'VAR_SAMP的同义词。仅在blink planner中支持。', 'VARIANCE(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (206, 'Function', '内置函数', '聚合函数', 'RANK()' - , '返回值在一组值中的秩。结果是1加上分区顺序中位于当前行之前或等于当前行的行数。这些值将在序列中产生空白。仅在blink planner中支持。', 'RANK()', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (207, 'Function', '内置函数', '聚合函数', 'DENSE_RANK()' - , '返回值在一组值中的秩。结果是1加上前面分配的秩值。与函数rank不同,dense_rank不会在排序序列中产生空隙。仅在blink planner中支持。', 'DENSE_RANK()', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (208, 'Function', '内置函数', '聚合函数', 'ROW_NUMBER()' - , '根据窗口分区中的行顺序,为每一行分配一个惟一的连续数字,从1开始。仅在blink planner中支持。', 'ROW_NUMBER()', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (209, 'Function', '内置函数', '聚合函数', 'LEAD(expression [, offset] [, default] )' - , '返回表达式在窗口中当前行之前的偏移行上的值。offset的默认值是1,default的默认值是NULL。仅在blink planner中支持。', 'LEAD(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (210, 'Function', '内置函数', '聚合函数', 'LAG(expression [, offset] [, default])' - , '返回表达式的值,该值位于窗口中当前行之后的偏移行。offset的默认值是1,default的默认值是NULL。仅在blink planner中支持。', 'LAG(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (211, 'Function', '内置函数', '聚合函数', 
'FIRST_VALUE(expression)' - , '返回一组有序值中的第一个值。仅在blink planner中支持。', 'FIRST_VALUE(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (212, 'Function', '内置函数', '聚合函数', 'LAST_VALUE(expression)' - , '返回一组有序值中的最后一个值。仅在blink planner中支持。', 'LAST_VALUE(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (213, 'Function', '内置函数', '聚合函数', 'LISTAGG(expression [, separator])' - , '连接字符串表达式的值,并在它们之间放置分隔符值。分隔符没有添加在字符串的末尾。分隔符的默认值是'',''。仅在blink planner中支持。', 'LISTAGG(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (214, 'Function', '内置函数', '列函数', 'withColumns(…)' - , '选择的列', 'withColumns(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (215, 'Function', '内置函数', '列函数', 'withoutColumns(…)' - , '不选择的列', 'withoutColumns(${1:})', '1.12', 0, 1 - , '2021-02-22 15:46:48', '2021-02-22 15:47:21');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (216, 'Function', '内置函数', '比较函数', 'value1 = value2' - , '如果value1等于value2 返回true; 如果value1或value2为NULL,则返回UNKNOWN 。', '${1:} =${2:}', '1.12', 9, 1 - , '2021-02-22 10:06:49', '2021-02-24 09:40:30');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (217, 'Function', 'UDF', '表值聚合函数', 'TO_MAP(string1,object2[, string3])' - , '将非规则一维表转化为规则二维表,string1是key。string2是value。string3为非必填项,表示key的值域(维度),用英文逗号分割。', 'TO_MAP(${1:})', '1.12', 8, 1 - , '2021-05-20 19:59:22', '2021-05-20 20:00:54');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (218, 'Reference', '建表语句', 'Streaming', 'EXECUTE CDCSOURCE print' - , 'Whole library synchronization print', 'EXECUTE CDCSOURCE demo_print WITH ( - ''connector'' = ''mysql-cdc'', - ''hostname'' = ''127.0.0.1'', - ''port'' = ''3306'', - ''username'' = ''root'', - ''password'' = ''123456'', - ''checkpoint'' = ''10000'', - ''scan.startup.mode'' = ''initial'', - ''parallelism'' = ''1'', - ''table-name'' = ''test\\.student,test\\.score'', - ''sink.connector'' = ''print'' -);', 'All Versions', 0, 1 - , '2023-10-31 16:01:45', '2023-10-31 16:02:56');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (219, 'Reference', '建表语句', 'Streaming', 'EXECUTE CDCSOURCE doris' - , 'Whole library synchronization doris', 'EXECUTE CDCSOURCE demo_print WITH ( - ''connector'' = 
''mysql-cdc'', - ''hostname'' = ''127.0.0.1'', - ''port'' = ''3306'', - ''username'' = ''root'', - ''password'' = ''123456'', - ''checkpoint'' = ''10000'', - ''scan.startup.mode'' = ''initial'', - ''parallelism'' = ''1'', - ''table-name'' = ''test\\.student,test\\.score'', - ''sink.connector'' = ''print'' -);', 'All Versions', 0, 1 - , '2023-10-31 16:02:21', '2023-10-31 16:03:09');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (220, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE demo_doris_schema_evolution' - , 'The entire library is synchronized to doris tape mode evolution', 'EXECUTE CDCSOURCE demo_doris_schema_evolution WITH ( - ''connector'' = ''mysql-cdc'', - ''hostname'' = ''127.0.0.1'', - ''port'' = ''3306'', - ''username'' = ''root'', - ''password'' = ''123456'', - ''checkpoint'' = ''10000'', - ''scan.startup.mode'' = ''initial'', - ''parallelism'' = ''1'', - ''table-name'' = ''test\\.student,test\\.score'', - ''sink.connector'' = ''datastream-doris-schema-evolution'', - ''sink.fenodes'' = ''127.0.0.1:8030'', - ''sink.username'' = ''root'', - ''sink.password'' = ''123456'', - ''sink.doris.batch.size'' = ''1000'', - ''sink.sink.max-retries'' = ''1'', - ''sink.sink.batch.interval'' = ''60000'', - ''sink.sink.db'' = ''test'', - ''sink.table.identifier'' = ''${schemaName}.${tableName}'' -);', 'All Versions', 0, 1 - , '2023-10-31 16:04:53', '2023-10-31 16:04:53');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (221, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE StarRocks ' - , 'The entire library is synchronized to StarRocks -', 'EXECUTE CDCSOURCE demo_hudi WITH ( - ''connector'' = ''mysql-cdc'', - ''hostname'' = ''127.0.0.1'', - ''port'' = ''3306'', - ''username'' = ''root'', - ''password'' = ''123456'', - ''checkpoint'' = ''10000'', - ''scan.startup.mode'' = ''initial'', - ''parallelism'' = ''1'', - ''database-name''=''bigdata'', - ''table-name''=''bigdata\\.products,bigdata\\.orders'', - ''sink.connector''=''hudi'', - ''sink.path''=''hdfs://nameservice1/data/hudi/${tableName}'', - ''sink.hoodie.datasource.write.recordkey.field''=''${pkList}'', - ''sink.hoodie.parquet.max.file.size''=''268435456'', - ''sink.write.tasks''=''1'', - ''sink.write.bucket_assign.tasks''=''2'', - ''sink.write.precombine''=''true'', - ''sink.compaction.async.enabled''=''true'', - ''sink.write.task.max.size''=''1024'', - ''sink.write.rate.limit''=''3000'', - ''sink.write.operation''=''upsert'', - ''sink.table.type''=''COPY_ON_WRITE'', - ''sink.compaction.tasks''=''1'', - ''sink.compaction.delta_seconds''=''20'', - ''sink.compaction.async.enabled''=''true'', - ''sink.read.streaming.skip_compaction''=''true'', - ''sink.compaction.delta_commits''=''20'', - ''sink.compaction.trigger.strategy''=''num_or_time'', - ''sink.compaction.max_memory''=''500'', - ''sink.changelog.enabled''=''true'', - ''sink.read.streaming.enabled''=''true'', - ''sink.read.streaming.check.interval''=''3'', - ''sink.hive_sync.skip_ro_suffix'' = ''true'', - ''sink.hive_sync.enable''=''true'', - ''sink.hive_sync.mode''=''hms'', - ''sink.hive_sync.metastore.uris''=''thrift://bigdata1:9083'', - ''sink.hive_sync.db''=''qhc_hudi_ods'', - ''sink.hive_sync.table''=''${tableName}'', - ''sink.table.prefix.schema''=''true'' -);', 'All Versions', 
0, 1 - , '2023-10-31 16:05:50', '2023-10-31 16:08:53');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (222, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE cdc_mysql' - , 'The entire library is synchronized to mysql', 'EXECUTE CDCSOURCE demo_startrocks WITH ( - ''connector'' = ''mysql-cdc'', - ''hostname'' = ''127.0.0.1'', - ''port'' = ''3306'', - ''username'' = ''root'', - ''password'' = ''123456'', - ''checkpoint'' = ''3000'', - ''scan.startup.mode'' = ''initial'', - ''parallelism'' = ''1'', - ''table-name'' = ''bigdata\\.products,bigdata\\.orders'', - ''sink.connector'' = ''starrocks'', - ''sink.jdbc-url'' = ''jdbc:mysql://127.0.0.1:19035'', - ''sink.load-url'' = ''127.0.0.1:18035'', - ''sink.username'' = ''root'', - ''sink.password'' = ''123456'', - ''sink.sink.db'' = ''ods'', - ''sink.table.prefix'' = ''ods_'', - ''sink.table.lower'' = ''true'', - ''sink.database-name'' = ''ods'', - ''sink.table-name'' = ''${tableName}'', - ''sink.sink.properties.format'' = ''json'', - ''sink.sink.properties.strip_outer_array'' = ''true'', - ''sink.sink.max-retries'' = ''10'', - ''sink.sink.buffer-flush.interval-ms'' = ''15000'', - ''sink.sink.parallelism'' = ''1'' -);', 'All Versions', 0, 1 - , '2023-10-31 16:07:08', '2023-10-31 16:08:46');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (223, 'Reference', '建表语句', 'Streaming', 'EXECUTE CDCSOURCE demo_doris' - , 'The entire library is synchronized to mysql', 'EXECUTE CDCSOURCE cdc_mysql WITH ( - ''connector'' = ''mysql-cdc'', - ''hostname'' = ''127.0.0.1'', - ''port'' = ''3306'', - ''username'' = ''root'', - ''password'' = ''123456'', - ''checkpoint'' = ''3000'', - ''scan.startup.mode'' = ''initial'', - ''parallelism'' = ''1'', - ''table-name'' = ''bigdata\\.products,bigdata\\.orders'', - ''sink.connector'' = ''jdbc'', - ''sink.url'' = ''jdbc:mysql://127.0.0.1:3306/test?characterEncoding=utf-8&useSSL=false'', - ''sink.username'' = ''root'', - ''sink.password'' = ''123456'', - ''sink.sink.db'' = ''test'', - ''sink.table.prefix'' = ''test_'', - ''sink.table.lower'' = ''true'', - ''sink.table-name'' = ''${tableName}'', - ''sink.driver'' = ''com.mysql.jdbc.Driver'', - ''sink.sink.buffer-flush.interval'' = ''2s'', - ''sink.sink.buffer-flush.max-rows'' = ''100'', - ''sink.sink.max-retries'' = ''5'', - ''sink.auto.create'' = ''true'' -);', 'All Versions', 0, 1 - , '2023-10-31 16:07:47', '2023-10-31 16:08:39');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (224, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE cdc_oracle' - , 'The entire library is synchronized to cdc_oracle', 'EXECUTE CDCSOURCE cdc_oracle WITH ( - ''connector'' = ''oracle-cdc'', - ''hostname'' = ''127.0.0.1'', - ''port'' = ''1521'', - ''username''=''root'', - ''password''=''123456'', - ''database-name''=''ORCL'', - ''checkpoint'' = ''3000'', - ''scan.startup.mode'' = ''initial'', - ''parallelism'' = ''1'', - ''table-name'' = ''TEST\\..*'', - ''connector'' = ''jdbc'', - ''url'' = ''jdbc:oracle:thin:@127.0.0.1:1521:orcl'', - ''username'' = ''root'', - ''password'' = ''123456'', - ''table-name'' = ''TEST2.${tableName}'' -);', 'All Versions', 0, 1 - , 
'2023-10-31 16:08:30', '2023-10-31 16:08:30');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (225, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE cdc_kafka_one' - , 'The entire library is synchronized to a topic in kafka', 'EXECUTE CDCSOURCE cdc_kafka_one WITH ( - ''connector'' = ''mysql-cdc'', - ''hostname'' = ''127.0.0.1'', - ''port'' = ''3306'', - ''username'' = ''root'', - ''password'' = ''123456'', - ''checkpoint'' = ''3000'', - ''scan.startup.mode'' = ''initial'', - ''parallelism'' = ''1'', - ''table-name'' = ''bigdata\\.products,bigdata\\.orders'', - ''sink.connector''=''datastream-kafka'', - ''sink.topic''=''cdctest'', - ''sink.brokers''=''bigdata2:9092,bigdata3:9092,bigdata4:9092'' -);', 'All Versions', 0, 1 - , '2023-10-31 16:10:13', '2023-10-31 16:10:13');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (226, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE cdc_kafka_mul' - , 'The entire library is synchronized to a single topic in kafka', 'EXECUTE CDCSOURCE cdc_kafka_mul WITH ( - ''connector'' = ''mysql-cdc'', - ''hostname'' = ''127.0.0.1'', - ''port'' = ''3306'', - ''username'' = ''root'', - ''password'' = ''123456'', - ''checkpoint'' = ''3000'', - ''scan.startup.mode'' = ''initial'', - ''parallelism'' = ''1'', - ''table-name'' = ''bigdata\\.products,bigdata\\.orders'', - ''sink.connector''=''datastream-kafka'', - ''sink.brokers''=''bigdata2:9092,bigdata3:9092,bigdata4:9092'' -)', 'All Versions', 0, 1 - , '2023-10-31 16:10:59', '2023-10-31 16:10:59');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (227, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE cdc_upsert_kafka' - , 'The entire library is synchronized to kafka primary key mode', 'EXECUTE CDCSOURCE cdc_upsert_kafka WITH ( - ''connector'' = ''mysql-cdc'', - ''hostname'' = ''127.0.0.1'', - ''port'' = ''3306'', - ''username'' = ''root'', - ''password'' = ''123456'', - ''checkpoint'' = ''3000'', - ''scan.startup.mode'' = ''initial'', - ''parallelism'' = ''1'', - ''table-name'' = ''bigdata\\.products,bigdata\\.orders'', - ''sink.connector'' = ''upsert-kafka'', - ''sink.topic'' = ''${tableName}'', - ''sink.properties.bootstrap.servers'' = ''bigdata2:9092,bigdata3:9092,bigdata4:9092'', - ''sink.key.format'' = ''json'', - ''sink.value.format'' = ''json'' -);', 'All Versions', 0, 1 - , '2023-10-31 16:12:14', '2023-10-31 16:12:14');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (228, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE cdc_postgresql ' - , 'The entire library is synchronized to postgresql', 'EXECUTE CDCSOURCE cdc_postgresql WITH ( - ''connector'' = ''mysql-cdc'', - ''hostname'' = ''127.0.0.1'', - ''port'' = ''3306'', - ''username'' = ''root'', - ''password'' = ''123456'', - ''checkpoint'' = ''3000'', - ''scan.startup.mode'' = ''initial'', - ''parallelism'' = ''1'', - ''table-name'' = ''bigdata\\.products,bigdata\\.orders'', - ''sink.connector'' = ''jdbc'', - ''sink.url'' = ''jdbc:postgresql://127.0.0.1:5432/test'', 
- ''sink.username'' = ''test'', - ''sink.password'' = ''123456'', - ''sink.sink.db'' = ''test'', - ''sink.table.prefix'' = ''test_'', - ''sink.table.lower'' = ''true'', - ''sink.table-name'' = ''${tableName}'', - ''sink.driver'' = ''org.postgresql.Driver'', - ''sink.sink.buffer-flush.interval'' = ''2s'', - ''sink.sink.buffer-flush.max-rows'' = ''100'', - ''sink.sink.max-retries'' = ''5'' -)', 'All Versions', 0, 1 - , '2023-10-31 16:12:54', '2023-10-31 16:12:54');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (229, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE cdc_clickhouse' - , 'Sync the entire library to clickhouse', 'EXECUTE CDCSOURCE cdc_clickhouse WITH ( - ''connector'' = ''mysql-cdc'', - ''hostname'' = ''127.0.0.1'', - ''port'' = ''3306'', - ''username'' = ''root'', - ''password'' = ''123456'', - ''checkpoint'' = ''3000'', - ''scan.startup.mode'' = ''initial'', - ''parallelism'' = ''1'', - ''table-name'' = ''bigdata\\.products,bigdata\\.orders'', - ''sink.connector'' = ''clickhouse'', - ''sink.url'' = ''clickhouse://127.0.0.1:8123'', - ''sink.username'' = ''default'', - ''sink.password'' = ''123456'', - ''sink.sink.db'' = ''test'', - ''sink.table.prefix'' = ''test_'', - ''sink.table.lower'' = ''true'', - ''sink.database-name'' = ''test'', - ''sink.table-name'' = ''${tableName}'', - ''sink.sink.batch-size'' = ''500'', - ''sink.sink.flush-interval'' = ''1000'', - ''sink.sink.max-retries'' = ''3'' -);', 'All Versions', 0, 1 - , '2023-10-31 16:13:33', '2023-10-31 16:13:33');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (230, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE mysql2hive' - , 'The entire library is synchronized to the sql-catalog of hive', 'EXECUTE CDCSOURCE mysql2hive WITH ( - ''connector'' = ''mysql-cdc'', - ''hostname'' = ''127.0.0.1'', - ''port'' = ''3306'', - ''username'' = ''root'', - ''password'' = ''123456'', - ''checkpoint'' = ''10000'', - ''scan.startup.mode'' = ''initial'', - ''parallelism'' = ''1'', - ''table-name'' = ''test\\..*'', - ''sink.connector'' = ''sql-catalog'', - ''sink.catalog.name'' = ''hive'', - ''sink.catalog.type'' = ''hive'', - ''sink.default-database'' = ''hdb'', - ''sink.hive-conf-dir'' = ''/usr/local/dlink/hive-conf'' -);', 'All Versions', 0, 1 - , '2023-10-31 16:14:31', '2023-10-31 16:14:31');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (231, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE mysql2paimon' - , 'The entire library is synchronized to paimon', 'EXECUTE CDCSOURCE mysql2paimon WITH ( - ''connector'' = ''mysql-cdc'', - ''hostname'' = ''127.0.0.1'', - ''port'' = ''3306'', - ''username'' = ''root'', - ''password'' = ''123456'', - ''checkpoint'' = ''10000'', - ''scan.startup.mode'' = ''initial'', - ''parallelism'' = ''1'', - ''table-name'' = ''test\\..*'', - ''sink.connector'' = ''sql-catalog'', - ''sink.catalog.name'' = ''fts'', - ''sink.catalog.type'' = ''table-store'', - ''sink.catalog.warehouse''=''file:/tmp/table_store'' -);', 'All Versions', 0, 1 - , '2023-10-31 16:15:22', '2023-10-31 16:28:52');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, 
`name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (232, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE mysql2dinky_catalog' - , 'The entire library is synchronized to dinky''s built-in catalog', 'EXECUTE CDCSOURCE mysql2dinky_catalog WITH ( - ''connector'' = ''mysql-cdc'', - ''hostname'' = ''127.0.0.1'', - ''port'' = ''3306'', - ''username'' = ''root'', - ''password'' = ''123456'', - ''checkpoint'' = ''10000'', - ''scan.startup.mode'' = ''initial'', - ''parallelism'' = ''1'', - ''table-name'' = ''test\\..*'', - ''sink.connector'' = ''sql-catalog'', - ''sink.catalog.name'' = ''dlinkmysql'', - ''sink.catalog.type'' = ''dlink_mysql'', - ''sink.catalog.username'' = ''dlink'', - ''sink.catalog.password'' = ''dlink'', - ''sink.catalog.url'' = ''jdbc:mysql://127.0.0.1:3306/dlink?useUnicode=true&characterEncoding=utf8&serverTimezone=UTC'', - ''sink.sink.db'' = ''default_database'' -);', 'All Versions', 0, 1 - , '2023-10-31 16:16:22', '2023-10-31 16:16:22');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (233, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE mysql2multiple_sink' - , 'Synchronization of the entire library to multiple data sources (sink)', 'EXECUTE CDCSOURCE mysql2multiple_sink WITH ( - ''connector'' = ''mysql-cdc'', - ''hostname'' = ''127.0.0.1'', - ''port'' = ''3306'', - ''username'' = ''root'', - ''password'' = ''123456'', - ''checkpoint'' = ''3000'', - ''scan.startup.mode'' = ''initial'', - ''parallelism'' = ''1'', - ''table-name'' = ''test\\.student,test\\.score'', - ''sink[0].connector'' = ''doris'', - ''sink[0].fenodes'' = ''127.0.0.1:8030'', - ''sink[0].username'' = ''root'', - ''sink[0].password'' = ''dw123456'', - ''sink[0].sink.batch.size'' = ''1'', - ''sink[0].sink.max-retries'' = ''1'', - ''sink[0].sink.batch.interval'' = ''60000'', - ''sink[0].sink.db'' = ''test'', - ''sink[0].table.prefix'' = ''ODS_'', - ''sink[0].table.upper'' = ''true'', - ''sink[0].table.identifier'' = ''${schemaName}.${tableName}'', - ''sink[0].sink.label-prefix'' = ''${schemaName}_${tableName}_1'', - ''sink[0].sink.enable-delete'' = ''true'', - ''sink[1].connector''=''datastream-kafka'', - ''sink[1].topic''=''cdc'', - ''sink[1].brokers''=''127.0.0.1:9092'' -)', 'All Versions', 0, 1 - , '2023-10-31 16:17:27', '2023-10-31 16:17:27');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (234, 'Reference', '建表语句', 'Batch/Streaming', 'ADD JAR' - , 'ADD JAR', 'ADD JAR ${1:}; -- str path ', 'All Versions', 0, 1 - , '2023-10-31 16:19:52', '2023-10-31 16:23:16');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (235, 'Function', 'Other', 'Batch/Streaming', 'SHOW FRAGMENTS' - , 'SHOW FRAGMENTS', 'SHOW FRAGMENTS;', 'All Versions', 0, 1 - , '2023-10-31 16:20:30', '2023-10-31 16:20:30');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (236, 'Function', 'Other', 'Batch/Streaming', 'SHOW FRAGMENT var1' - , 'SHOW FRAGMENT var1', 'SHOW FRAGMENT ${1:};', 'All Versions', 0, 1 - 
, '2023-10-31 16:21:23', '2023-10-31 16:21:23');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (237, 'Reference', '建表语句', 'Batch/Streaming', 'EXECUTE CDCSOURCE demo_hudi' - , 'The entire library is synchronized to hudi', 'EXECUTE CDCSOURCE demo_hudi WITH ( - ''connector'' = ''mysql-cdc'', - ''hostname'' = ''127.0.0.1'', - ''port'' = ''3306'', - ''username'' = ''root'', - ''password'' = ''123456'', - ''checkpoint'' = ''10000'', - ''scan.startup.mode'' = ''initial'', - ''parallelism'' = ''1'', - ''database-name''=''bigdata'', - ''table-name''=''bigdata\\.products,bigdata\\.orders'', - ''sink.connector''=''hudi'', - ''sink.path''=''hdfs://nameservice1/data/hudi/${tableName}'', - ''sink.hoodie.datasource.write.recordkey.field''=''${pkList}'', - ''sink.hoodie.parquet.max.file.size''=''268435456'', - ''sink.write.tasks''=''1'', - ''sink.write.bucket_assign.tasks''=''2'', - ''sink.write.precombine''=''true'', - ''sink.compaction.async.enabled''=''true'', - ''sink.write.task.max.size''=''1024'', - ''sink.write.rate.limit''=''3000'', - ''sink.write.operation''=''upsert'', - ''sink.table.type''=''COPY_ON_WRITE'', - ''sink.compaction.tasks''=''1'', - ''sink.compaction.delta_seconds''=''20'', - ''sink.compaction.async.enabled''=''true'', - ''sink.read.streaming.skip_compaction''=''true'', - ''sink.compaction.delta_commits''=''20'', - ''sink.compaction.trigger.strategy''=''num_or_time'', - ''sink.compaction.max_memory''=''500'', - ''sink.changelog.enabled''=''true'', - ''sink.read.streaming.enabled''=''true'', - ''sink.read.streaming.check.interval''=''3'', - ''sink.hive_sync.skip_ro_suffix'' = ''true'', - ''sink.hive_sync.enable''=''true'', - ''sink.hive_sync.mode''=''hms'', - ''sink.hive_sync.metastore.uris''=''thrift://bigdata1:9083'', - ''sink.hive_sync.db''=''qhc_hudi_ods'', - ''sink.hive_sync.table''=''${tableName}'', - ''sink.table.prefix.schema''=''true'' -);', 'All Versions', 0, 1 - , '2023-10-31 16:24:47', '2023-10-31 16:24:47');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (238, 'Reference', 'Other', 'Batch/Streaming', 'EXECUTE JAR ' - , 'EXECUTE JAR use sql', 'EXECUTE JAR WITH ( -''uri''=''file:///opt/flink/lib/paimon-flink-1.16-0.5-20230818.001833-127.jar'', -''main-class''=''org.apache.paimon.flink.action.FlinkActions'', -''args''=''mysql-sync-table --warehouse hdfs:///save --database cdc-test --table cdc_test1 --primary-keys id --mysql-conf hostname=121.5.136.161 --mysql-conf port=3371 --mysql-conf username=root --mysql-conf password=dinky --mysql-conf database-name=cdc-test --mysql-conf table-name=table_1 --mysql-conf server-time-zone=Asia/Shanghai --table-conf bucket=4 --table-conf changelog-producer=input --table-conf sink.parallelism=1'', -''parallelism''='''', -''savepoint-path''='''' -);', 'All Versions', 0, 1 - , '2023-10-31 16:27:53', '2023-10-31 16:27:53');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (239, 'Reference', '内置函数', 'Streaming', 'PRINT tablename' - , 'PRINT table data', 'PRINT ${1:}', 'All Versions', 0, 1 - , '2023-10-31 16:30:22', '2023-10-31 16:30:22');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , 
`description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (240, 'Reference', '建表语句', 'Batch/Streaming', 'CREATE TABLE Like' - , 'CREATE TABLE Like source table', 'DROP TABLE IF EXISTS sink_table; -CREATE TABLE IF not EXISTS sink_table -WITH ( - ''topic'' = ''motor_vehicle_error'' -) -LIKE source_table;', 'All Versions', 0, 1 - , '2023-10-31 16:33:38', '2023-10-31 16:33:38');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (241, 'Reference', '建表语句', 'Batch/Streaming', 'CREATE TABLE like source_table EXCLUDING' - , 'CREATE TABLE like source_table EXCLUDING', 'DROP TABLE IF EXISTS sink_table; -CREATE TABLE IF not EXISTS sink_table( - -- Add watermark definition - WATERMARK FOR order_time AS order_time - INTERVAL ''5'' SECOND -) -WITH ( - ''topic'' = ''motor_vehicle_error'' -) -LIKE source_table ( - -- Exclude everything besides the computed columns which we need to generate the watermark for. - -- We do not want to have the partitions or filesystem options as those do not apply to kafka. - EXCLUDING ALL - INCLUDING GENERATED -);', 'All Versions', 0, 1 - , '2023-10-31 16:36:13', '2023-10-31 16:36:13');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (242, 'Reference', '建表语句', 'Batch/Streaming', 'CREATE TABLE ctas_kafka' - , 'CREATE TABLE ctas_kafka', 'CREATE TABLE my_ctas_table -WITH ( - ''connector'' = ''kafka'' -) -AS SELECT id, name, age FROM source_table WHERE mod(id, 10) = 0;', 'All Versions', 0, 1 - , '2023-10-31 16:37:33', '2023-10-31 16:47:17');; -INSERT INTO `dinky_flink_document` (`id`, `category`, `type`, `subtype`, `name` - , `description`, `fill_value`, `version`, `like_num`, `enabled` - , `create_time`, `update_time`) -VALUES (243, 'Reference', '建表语句', 'Batch/Streaming', 'CREATE TABLE rtas_kafka' - , 'CREATE TABLE rtas_kafka', 'CREATE OR REPLACE TABLE my_ctas_table -WITH ( - ''connector'' = ''kafka'' -) -AS SELECT id, name, age FROM source_table WHERE mod(id, 10) = 0;', 'All Versions', 0, 1 - , '2023-10-31 16:41:46', '2023-10-31 16:43:29');; - - - - - -DROP TABLE IF EXISTS `dinky_fragment`; -CREATE TABLE `dinky_fragment` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'id', - `name` varchar(50) NOT null COMMENT 'fragment name', - `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', - `fragment_value` text NOT null COMMENT 'fragment value', - `note` text null COMMENT 'note', - `enabled` tinyint(4) null DEFAULT 1 COMMENT 'is enable', - `create_time` datetime(0) null DEFAULT null COMMENT 'create time', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_history`; -CREATE TABLE `dinky_history` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', - `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', - `cluster_id` int(11) NOT null DEFAULT 0 COMMENT 'cluster ID', - `cluster_configuration_id` int(11) null DEFAULT null COMMENT 'cluster configuration id', - `session` varchar(255) null DEFAULT null COMMENT 'session', - `job_id` varchar(50) null DEFAULT null COMMENT 'Job ID', - `job_name` varchar(255) null DEFAULT null COMMENT 'Job Name', - `job_manager_address` varchar(255) null DEFAULT null COMMENT 'JJobManager Address', - `status` int(11) NOT null DEFAULT 0 
COMMENT 'status', - `type` varchar(50) null DEFAULT null COMMENT 'job type', - `statement` text null COMMENT 'statement set', - `error` text null COMMENT 'error message', - `result` text null COMMENT 'result set', - `config_json` json null COMMENT 'config json', - `start_time` datetime(0) null DEFAULT null COMMENT 'job start time', - `end_time` datetime(0) null DEFAULT null COMMENT 'job end time', - `task_id` int(11) null DEFAULT null COMMENT 'task ID', - INDEX task_index1(`task_id`), - INDEX cluster_index2(`cluster_id`) -) ENGINE = InnoDB ROW_FORMAT = Dynamic; - -DROP TABLE IF EXISTS `dinky_job_history`; -CREATE TABLE `dinky_job_history` ( - `id` int(11) NOT null COMMENT 'id', - `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', - `job_json` json null COMMENT 'Job information json', - `exceptions_json` json null COMMENT 'error message json', - `checkpoints_json` json null COMMENT 'checkpoints json', - `checkpoints_config_json` json null COMMENT 'checkpoints configuration json', - `config_json` json null COMMENT 'configuration', - `cluster_json` json null COMMENT 'cluster instance configuration', - `cluster_configuration_json` json null COMMENT 'cluster config', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_job_instance`; -CREATE TABLE `dinky_job_instance` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'id', - `name` varchar(255) null DEFAULT null COMMENT 'job instance name', - `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', - `task_id` int(11) null DEFAULT null COMMENT 'task ID', - `step` int(11) null DEFAULT null COMMENT 'job lifecycle', - `cluster_id` int(11) null DEFAULT null COMMENT 'cluster ID', - `jid` varchar(50) null DEFAULT null COMMENT 'Flink JobId', - `status` varchar(50) null DEFAULT null COMMENT 'job instance status', - `history_id` int(11) null DEFAULT null COMMENT 'execution history ID', - `create_time` datetime(0) null DEFAULT null COMMENT 'create time', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time', - `finish_time` datetime(0) null DEFAULT null COMMENT 'finish time', - `duration` bigint(20) null DEFAULT null COMMENT 'job duration', - `error` text null COMMENT 'error logs', - `failed_restart_count` int(11) null DEFAULT null COMMENT 'failed restart count', - INDEX job_instance_task_id_idx13(`task_id`) -) ENGINE = InnoDB ROW_FORMAT = Dynamic; - -DROP TABLE IF EXISTS `dinky_role`; -CREATE TABLE `dinky_role` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', - `tenant_id` int(11) NOT null COMMENT 'tenant id', - `role_code` varchar(64) NOT null COMMENT 'role code', - `role_name` varchar(64) NOT null COMMENT 'role name', - `is_delete` tinyint(1) NOT null DEFAULT 0 COMMENT 'is delete', - `note` varchar(255) null DEFAULT null COMMENT 'note', - `create_time` datetime(0) null DEFAULT null COMMENT 'create time', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; -INSERT INTO `dinky_role` -VALUES (1, 1, 'SuperAdmin', 'SuperAdmin', 0 - , 'SuperAdmin of Role', '2022-12-13 05:27:19', '2022-12-13 05:27:19'); - -DROP TABLE IF EXISTS `dinky_savepoints`; -CREATE TABLE `dinky_savepoints` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', - `task_id` int(11) NOT null COMMENT 'task ID', - `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', - `name` varchar(255) NOT null COMMENT 'task name', - `type` varchar(255) NOT null COMMENT 'savepoint type', - `path` varchar(255) NOT null COMMENT 
'savepoint path', - `create_time` datetime(0) null DEFAULT null COMMENT 'create time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; - -DROP TABLE IF EXISTS `dinky_sys_config`; -CREATE TABLE `dinky_sys_config` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', - `name` varchar(255) NOT null COMMENT 'configuration name', - `value` text null COMMENT 'configuration value', - `create_time` datetime(0) null DEFAULT null COMMENT 'create time', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_task`; -CREATE TABLE `dinky_task` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', - `name` varchar(255) NOT null COMMENT 'Job name', - `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', - `dialect` varchar(50) null DEFAULT null COMMENT 'dialect', - `type` varchar(50) null DEFAULT null COMMENT 'Job type', - `check_point` int(11) null DEFAULT null COMMENT 'CheckPoint trigger seconds', - `save_point_strategy` int(11) null DEFAULT null COMMENT 'SavePoint strategy', - `save_point_path` varchar(255) null DEFAULT null COMMENT 'SavePointPath', - `parallelism` int(11) null DEFAULT null COMMENT 'parallelism', - `fragment` tinyint(1) null DEFAULT 0 COMMENT 'fragment', - `statement_set` tinyint(1) null DEFAULT 0 COMMENT 'enable statement set', - `batch_model` tinyint(1) null DEFAULT 0 COMMENT 'use batch model', - `cluster_id` int(11) null DEFAULT null COMMENT 'Flink cluster ID', - `cluster_configuration_id` int(11) null DEFAULT null COMMENT 'cluster configuration ID', - `database_id` int(11) null DEFAULT null COMMENT 'database ID', - `env_id` int(11) null DEFAULT null COMMENT 'env id', - `alert_group_id` bigint(20) null DEFAULT null COMMENT 'alert group id', - `config_json` text null COMMENT 'configuration json', - `note` varchar(255) null DEFAULT null COMMENT 'Job Note', - `step` int(11) null DEFAULT 1 COMMENT 'Job lifecycle', - `job_instance_id` bigint(20) null DEFAULT null COMMENT 'job instance id', - `enabled` tinyint(1) NOT null DEFAULT 1 COMMENT 'is enable', - `create_time` datetime(0) null DEFAULT null COMMENT 'create time', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time', - `version_id` int(11) null DEFAULT null COMMENT 'version id', - `statement` text null DEFAULT null COMMENT 'statement' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; - -DROP TABLE IF EXISTS `dinky_task_version`; -CREATE TABLE `dinky_task_version` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', - `task_id` int(11) NOT null COMMENT 'task ID ', - `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', - `version_id` int(11) NOT null COMMENT 'version ID ', - `statement` text null COMMENT 'flink sql statement', - `name` varchar(255) NOT null COMMENT 'version name', - `dialect` varchar(50) null DEFAULT null COMMENT 'dialect', - `type` varchar(50) null DEFAULT null COMMENT 'type', - `task_configure` text NOT null COMMENT 'task configuration', - `create_time` datetime(0) null DEFAULT null COMMENT 'create time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_tenant`; -CREATE TABLE `dinky_tenant` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', - `tenant_code` varchar(64) NOT null COMMENT 'tenant code', - `is_delete` tinyint(1) NOT null DEFAULT 0 COMMENT 'is delete', - `note` varchar(255) null DEFAULT null COMMENT 'note', - `create_time` datetime(0) null DEFAULT null COMMENT 'create time', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; 
-INSERT INTO `dinky_tenant` -VALUES (1, 'DefaultTenant', 0, 'DefaultTenant', '2022-12-13 05:27:19' - , '2022-12-13 05:27:19'); -DROP TABLE IF EXISTS `dinky_udf`; -CREATE TABLE `dinky_udf` ( - `id` int(11) NOT null AUTO_INCREMENT, - `name` varchar(200) null DEFAULT null COMMENT 'udf name', - `class_name` varchar(200) null DEFAULT null COMMENT 'Complete class name', - `source_code` longtext null COMMENT 'source code', - `compiler_code` binary(255) null DEFAULT null COMMENT 'compiler product', - `version_id` int(11) null DEFAULT null COMMENT 'version', - `version_description` varchar(50) null DEFAULT null COMMENT 'version description', - `is_default` tinyint(1) null DEFAULT null COMMENT 'Is it default', - `document_id` int(11) null DEFAULT null COMMENT 'corresponding to the document id', - `from_version_id` int(11) null DEFAULT null COMMENT 'Based on udf version id', - `code_md5` varchar(50) null DEFAULT null COMMENT 'source code of md5', - `dialect` varchar(50) null DEFAULT null COMMENT 'dialect', - `type` varchar(50) null DEFAULT null COMMENT 'type', - `step` int(11) null DEFAULT null COMMENT 'job lifecycle step', - `enable` tinyint(1) null DEFAULT 1 COMMENT 'is enable', - `create_time` datetime(0) null DEFAULT null COMMENT 'create time', - `update_time` datetime DEFAULT null ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_udf_template`; -CREATE TABLE `dinky_udf_template` ( - `id` int(11) NOT null AUTO_INCREMENT, - `name` varchar(100) null DEFAULT null COMMENT 'template name', - `code_type` varchar(10) null DEFAULT null COMMENT 'code type', - `function_type` varchar(10) null DEFAULT null COMMENT 'function type', - `template_code` longtext null COMMENT 'code', - `enabled` tinyint(1) not null DEFAULT 1 COMMENT 'is enable', - `create_time` datetime(0) null DEFAULT null COMMENT 'create time', - `update_time` datetime DEFAULT null ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; -INSERT INTO `dinky_udf_template` -VALUES (1, 'java_udf', 'Java', 'UDF', '${(package=='''')?string('''',''package ''+package+'';'')} - -import org.apache.flink.table.functions.ScalarFunction; - -public class ${className} extends ScalarFunction { - public String eval(String s) { - return null; - } -}' - , 1, '2022-10-19 09:17:37', '2022-10-25 17:45:57'); -INSERT INTO `dinky_udf_template` -VALUES (2, 'java_udtf', 'Java', 'UDTF', '${(package=='''')?string('''',''package ''+package+'';'')} - -import org.apache.flink.table.functions.ScalarFunction; - -@FunctionHint(output = @DataTypeHint("ROW")) -public static class ${className} extends TableFunction { - - public void eval(String str) { - for (String s : str.split(" ")) { - // use collect(...) 
to emit a row - collect(Row.of(s, s.length())); - } - } -}' - , 1, '2022-10-19 09:22:58', '2022-10-25 17:49:30'); -INSERT INTO `dinky_udf_template` -VALUES (3, 'scala_udf', 'Scala', 'UDF', '${(package=='''')?string('''',''package ''+package+'';'')} - -import org.apache.flink.table.api._ -import org.apache.flink.table.functions.ScalarFunction - -// 定义可参数化的函数逻辑 -class ${className} extends ScalarFunction { - def eval(s: String, begin: Integer, end: Integer): String = { - "this is scala" - } -}' - , 1, '2022-10-25 09:21:32', '2022-10-25 17:49:46'); -INSERT INTO `dinky_udf_template` -VALUES (4, 'python_udf_1', 'Python', 'UDF', 'from pyflink.table import ScalarFunction, DataTypes -from pyflink.table.udf import udf - -class ${className}(ScalarFunction): - def __init__(self): - pass - - def eval(self, variable): - return str(variable) - - -${attr!''f''} = udf(${className}(), result_type=DataTypes.STRING())' - , 1, '2022-10-25 09:23:07', '2022-10-25 09:34:01'); -INSERT INTO `dinky_udf_template` -VALUES (5, 'python_udf_2', 'Python', 'UDF', 'from pyflink.table import DataTypes -from pyflink.table.udf import udf - -@udf(result_type=DataTypes.STRING()) -def ${className}(variable1:str): - return ''''' - , 1, '2022-10-25 09:25:13', '2022-10-25 09:34:47'); - - -DROP TABLE IF EXISTS `dinky_user`; -CREATE TABLE `dinky_user` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', - `username` varchar(50) NOT null COMMENT 'username', - `user_type` int DEFAULT 0 NOT null COMMENT 'login type (0:LOCAL,1:LDAP)', - `password` varchar(50) null DEFAULT null COMMENT 'password', - `nickname` varchar(50) null DEFAULT null COMMENT 'nickname', - `worknum` varchar(50) null DEFAULT null COMMENT 'worknum', - `avatar` blob null COMMENT 'avatar', - `mobile` varchar(20) null DEFAULT null COMMENT 'mobile phone', - `enabled` tinyint(1) NOT null DEFAULT 1 COMMENT 'is enable', - `super_admin_flag` tinyint(1) DEFAULT '0' COMMENT 'is super admin(0:false,1true)', - `is_delete` tinyint(1) NOT null DEFAULT 0 COMMENT 'is delete', - `create_time` datetime(0) null DEFAULT null COMMENT 'create time', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; -INSERT INTO `dinky_user` -VALUES (1, 'admin', 1,'21232f297a57a5a743894a0e4a801fc3', 'Admin', 'Dinky-001' - , null, '17777777777', 1,1, 0, '2022-12-13 05:27:19' - , '2022-12-13 05:27:19'); -DROP TABLE IF EXISTS `dinky_user_role`; -CREATE TABLE `dinky_user_role` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', - `user_id` int(11) NOT null COMMENT 'user id', - `role_id` int(11) NOT null COMMENT 'role id', - `create_time` datetime(0) null DEFAULT null COMMENT 'create time', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; -INSERT INTO `dinky_user_role` -VALUES (1, 1, 1, '2022-12-13 05:27:19', '2022-12-13 05:27:19'); -DROP TABLE IF EXISTS `dinky_user_tenant`; -CREATE TABLE `dinky_user_tenant` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', - `user_id` int(11) NOT null COMMENT 'user id', - `tenant_id` int(11) NOT null COMMENT 'tenant id', - `tenant_admin_flag` tinyint DEFAULT '0' COMMENT 'tenant admin flag(0:false,1:true)', - `create_time` datetime(0) null DEFAULT null COMMENT 'create time', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; -INSERT INTO `dinky_user_tenant` (`id`, `user_id`, `tenant_id`, `create_time`, `update_time`) -VALUES (1, 1, 1, current_time, current_time); -DROP TABLE IF EXISTS 
`metadata_column`; -CREATE TABLE `metadata_column` ( - `column_name` varchar(255) NOT null COMMENT 'column name', - `column_type` varchar(255) NOT null COMMENT 'column type, such as : Physical , Metadata , Computed , WATERMARK', - `data_type` varchar(255) NOT null COMMENT 'data type', - `expr` varchar(255) null DEFAULT null COMMENT 'expression', - `description` varchar(255) NOT null COMMENT 'column description', - `table_id` int(11) NOT null COMMENT 'table id', - `primary` bit(1) null DEFAULT null COMMENT 'table primary key', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time', - `create_time` datetime(0) NOT null DEFAULT CURRENT_TIMESTAMP(0) COMMENT 'create time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `metadata_database`; -CREATE TABLE `metadata_database` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'id', - `database_name` varchar(255) NOT null COMMENT 'database name', - `description` varchar(255) null DEFAULT null COMMENT 'database description', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time', - `create_time` datetime(0) null DEFAULT CURRENT_TIMESTAMP(0) COMMENT 'create time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `metadata_database_property`; -CREATE TABLE `metadata_database_property` ( - `key` varchar(255) NOT null COMMENT 'key', - `value` varchar(255) null DEFAULT null COMMENT 'value', - `database_id` int(11) NOT null COMMENT 'database id', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time', - `create_time` datetime(0) NOT null DEFAULT CURRENT_TIMESTAMP(0) COMMENT 'create time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `metadata_function`; -CREATE TABLE `metadata_function` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT '主键', - `function_name` varchar(255) NOT null COMMENT 'function name', - `class_name` varchar(255) NOT null COMMENT 'class name', - `database_id` int(11) NOT null COMMENT 'database id', - `function_language` varchar(255) null DEFAULT null COMMENT 'function language', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time', - `create_time` datetime(0) null DEFAULT CURRENT_TIMESTAMP(0) COMMENT 'create time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `metadata_table`; -CREATE TABLE `metadata_table` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT '主键', - `table_name` varchar(255) NOT null COMMENT 'table name', - `table_type` varchar(255) NOT null COMMENT 'type,such as:database,table,view', - `database_id` int(11) NOT null COMMENT 'database id', - `description` varchar(255) null DEFAULT null COMMENT 'table description', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time', - `create_time` datetime(0) null DEFAULT CURRENT_TIMESTAMP(0) COMMENT 'create time' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `metadata_table_property`; -CREATE TABLE `metadata_table_property` ( - `key` varchar(255) NOT null COMMENT 'key', - `value` mediumtext null COMMENT 'value', - `table_id` int(11) NOT null COMMENT 'table id', - `update_time` datetime(0) null DEFAULT null COMMENT 'update time', - `create_time` datetime(0) NOT null DEFAULT CURRENT_TIMESTAMP(0) COMMENT 'create tiime' -) ENGINE = InnoDB ROW_FORMAT = Dynamic; --- ---------------------------- --- Records of metadata_table_property --- ---------------------------- --- ---------------------------- --- Table structure for dinky_row_permissions --- ---------------------------- -DROP TABLE IF EXISTS `dinky_row_permissions`; -CREATE TABLE 
dinky_row_permissions ( - id int PRIMARY KEY AUTO_INCREMENT COMMENT 'ID', - role_id int NOT null COMMENT '角色ID', - table_name varchar(255) null COMMENT '表名', - expression varchar(255) null COMMENT '表达式', - create_time datetime null COMMENT '创建时间', - update_time datetime null COMMENT '更新时间' -); -SET FOREIGN_KEY_CHECKS = 1; -DROP TABLE IF EXISTS `dinky_git_project`; -CREATE TABLE `dinky_git_project` ( - `id` bigint(20) NOT null AUTO_INCREMENT, - `tenant_id` bigint(20) NOT null, - `name` varchar(255) NOT null, - `url` varchar(1000) NOT null, - `branch` varchar(1000) NOT null, - `username` varchar(255) DEFAULT null, - `password` varchar(255) DEFAULT null, - `private_key` varchar(255) DEFAULT null COMMENT 'keypath', - `pom` varchar(255) DEFAULT null, - `build_args` varchar(255) DEFAULT null, - `code_type` tinyint(4) DEFAULT null COMMENT 'code type(1-java,2-python)', - `type` tinyint(4) NOT null COMMENT '1-http ,2-ssh', - `last_build` datetime DEFAULT null, - `description` varchar(255) DEFAULT null, - `build_state` tinyint(2) NOT null DEFAULT '0' COMMENT '0-notStart 1-process 2-failed 3-success', - `build_step` tinyint(2) NOT null DEFAULT '0', - `enabled` tinyint(1) NOT null DEFAULT '1' COMMENT '0-disable 1-enable', - `udf_class_map_list` text COMMENT 'scan udf class', - `order_line` int(11) NOT null DEFAULT '1' COMMENT 'order', - `create_time` datetime NOT null DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', - `update_time` datetime NOT null DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time' -) ENGINE = InnoDB; -INSERT INTO `dinky_git_project` (`id`, `tenant_id`, `name`, `url`, `branch` - , `username`, `password`, `private_key`, `pom`, `build_args` - , `code_type`, `type`, `last_build`, `description`, `build_state` - , `build_step`, `enabled`, `udf_class_map_list`, `order_line`) -VALUES (1, 1, 'java-udf', 'https://github.com/zackyoungh/dinky-quickstart-java.git', 'master' - , null, null, null, null, '-P flink-1.14' - , 1, 1, null, null, 0 - , 0, 1, '[]', 1); -INSERT INTO `dinky_git_project` (`id`, `tenant_id`, `name`, `url`, `branch` - , `username`, `password`, `private_key`, `pom`, `build_args` - , `code_type`, `type`, `last_build`, `description`, `build_state` - , `build_step`, `enabled`, `udf_class_map_list`, `order_line`) -VALUES (2, 1, 'python-udf', 'https://github.com/zackyoungh/dinky-quickstart-python.git', 'master' - , null, null, null, null, '' - , 2, 1, null, null, 0 - , 0, 1, '[]', 2); - -DROP TABLE IF EXISTS dinky_metrics; -CREATE TABLE `dinky_metrics` ( - `id` int(11) NOT null AUTO_INCREMENT, - `task_id` int(255) DEFAULT null, - `vertices` varchar(255) DEFAULT null, - `metrics` varchar(255) DEFAULT null, - `position` int(11) DEFAULT null, - `show_type` varchar(255) DEFAULT null, - `show_size` varchar(255) DEFAULT null, - `title` CLOB DEFAULT null, - `layout_name` varchar(255) DEFAULT null, - `create_time` datetime DEFAULT null, - `update_time` datetime DEFAULT null -) ENGINE = InnoDB; - -DROP TABLE IF EXISTS dinky_resources; -CREATE TABLE `dinky_resources` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'key', - `file_name` varchar(64) DEFAULT null COMMENT 'file name', - `description` varchar(255) DEFAULT null, - `user_id` int(11) DEFAULT null COMMENT 'user id', - `type` tinyint(4) DEFAULT null COMMENT 'resource type,0:FILE,1:UDF', - `size` bigint(20) DEFAULT null COMMENT 'resource size', - `pid` int(11) DEFAULT null, - `full_name` varchar(128) DEFAULT null, - `is_directory` tinyint(4) DEFAULT null, - `create_time` datetime NOT null DEFAULT 
CURRENT_TIMESTAMP COMMENT 'create time', - `update_time` datetime NOT null DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time' -) ENGINE = InnoDB; -INSERT INTO `dinky_resources` (`id`, `file_name`, `description`, `user_id`, `type`, `size`, `pid`, `full_name`, `is_directory`) VALUES (0, 'Root', 'main folder', 1, 0, 0, -1, '/', 1); - - --- ---------------------------- --- Table structure for dinky_sys_login_log --- ---------------------------- -DROP TABLE IF EXISTS dinky_sys_login_log; -CREATE TABLE `dinky_sys_login_log` ( - `id` int(11) NOT null AUTO_INCREMENT COMMENT 'key', - `user_id` int(11) NOT null COMMENT 'user id', - `username` varchar(60) NOT null COMMENT 'username', - `login_type` int NOT null COMMENT 'login type(0:LOCAL,1:LDAP)', - `ip` varchar(40) NOT null COMMENT 'ip addr', - `status` int NOT null COMMENT 'login status', - `msg` text NOT null COMMENT 'status msg', - `create_time` datetime NOT null COMMENT 'create time', - `access_time` datetime DEFAULT null COMMENT 'access time', - `update_time` datetime NOT null, - `is_deleted` tinyint(1) NOT null DEFAULT '0', - PRIMARY KEY (`id`) -) ENGINE=InnoDB; - - --- ---------------------------- --- Table structure for dinky_sys_operate_log --- ---------------------------- -DROP TABLE IF EXISTS `dinky_sys_operate_log`; -CREATE TABLE `dinky_sys_operate_log` ( - `id` bigint NOT null AUTO_INCREMENT COMMENT 'id', - `module_name` varchar(50) DEFAULT '' COMMENT 'module name', - `business_type` int null DEFAULT 0 COMMENT 'business type', - `method` varchar(100) null DEFAULT '' COMMENT 'method name', - `request_method` varchar(10) null DEFAULT '' COMMENT 'request method', - `operate_name` varchar(50) DEFAULT '' COMMENT 'operate name', - `operate_user_id` int NOT null COMMENT 'operate user id', - `operate_url` varchar(255) DEFAULT '' COMMENT 'operate url', - `operate_ip` varchar(50) DEFAULT '' COMMENT 'ip', - `operate_location` varchar(255) DEFAULT '' COMMENT 'operate location', - `operate_param` longtext DEFAULT '' COMMENT 'request param', - `json_result` longtext DEFAULT null COMMENT 'return json result', - `status` int null DEFAULT null COMMENT 'operate status', - `error_msg` longtext DEFAULT null COMMENT 'error msg', - `operate_time` datetime(0) DEFAULT null COMMENT 'operate time', - PRIMARY KEY (`id`) -) ENGINE = InnoDB; - - - --- ---------------------------- --- Table structure for dinky_sys_menu --- ---------------------------- -drop table if exists `dinky_sys_menu`; -create table `dinky_sys_menu` ( - `id` bigint not null auto_increment comment ' id', - `parent_id` bigint not null comment 'parent menu id', - `name` varchar(64) not null comment 'menu button name', - `path` varchar(64) default null comment 'routing path', - `component` varchar(64) default null comment 'routing component component', - `perms` varchar(64) default null comment 'authority id', - `icon` varchar(64) default null comment 'icon', - `type` char(1) default null comment 'type(M:directory C:menu F:button)', - `display` tinyint default 1 comment 'whether the menu is displayed', - `order_num` int default null comment 'sort', - `create_time` datetime not null default current_timestamp comment 'create time', - `update_time` datetime not null default current_timestamp on update current_timestamp comment 'modify time', - `note` varchar(255) default null comment 'note', - primary key (`id`) -) engine=innodb ; - -INSERT INTO `dinky_sys_menu` VALUES (1, -1, '首页', '/home', './Home', 'home', 'HomeOutlined', 'C', 0, 1, '2023-08-11 14:06:52', '2023-09-25 
18:26:45', null); -INSERT INTO `dinky_sys_menu` VALUES (2, -1, '运维中心', '/devops', null, 'devops', 'ControlOutlined', 'M', 0, 20, '2023-08-11 14:06:52', '2023-09-26 14:53:34', null); -INSERT INTO `dinky_sys_menu` VALUES (3, -1, '注册中心', '/registration', null, 'registration', 'AppstoreOutlined', 'M', 0, 23, '2023-08-11 14:06:52', '2023-09-26 14:54:03', null); -INSERT INTO `dinky_sys_menu` VALUES (4, -1, '认证中心', '/auth', null, 'auth', 'SafetyCertificateOutlined', 'M', 0, 79, '2023-08-11 14:06:52', '2023-09-26 15:08:42', null); -INSERT INTO `dinky_sys_menu` VALUES (5, -1, '数据开发', '/datastudio', './DataStudio', 'datastudio', 'CodeOutlined', 'C', 0, 4, '2023-08-11 14:06:52', '2023-09-26 14:49:12', null); -INSERT INTO `dinky_sys_menu` VALUES (6, -1, '配置中心', '/settings', null, 'settings', 'SettingOutlined', 'M', 0, 115, '2023-08-11 14:06:53', '2023-09-26 15:16:03', null); -INSERT INTO `dinky_sys_menu` VALUES (7, -1, '关于', '/about', './Other/About', 'about', 'SmileOutlined', 'C', 0, 143, '2023-08-11 14:06:53', '2023-09-26 15:21:21', null); -INSERT INTO `dinky_sys_menu` VALUES (8, -1, '监控', '/metrics', './Metrics', 'metrics', 'DashboardOutlined', 'C', 0, 140, '2023-08-11 14:06:53', '2023-09-26 15:20:49', null); -INSERT INTO `dinky_sys_menu` VALUES (9, 3, '集群', '/registration/cluster', null, 'registration:cluster', 'GoldOutlined', 'M', 0, 24, '2023-08-11 14:06:54', '2023-09-26 14:54:19', null); -INSERT INTO `dinky_sys_menu` VALUES (10, 3, '数据源', '/registration/datasource', './RegCenter/DataSource', 'registration:datasource', 'DatabaseOutlined', 'M', 0, 37, '2023-08-11 14:06:54', '2023-09-26 14:59:31', null); -INSERT INTO `dinky_sys_menu` VALUES (11, -1, '个人中心', '/account/center', './Other/PersonCenter', 'account:center', 'UserOutlined', 'C', 0, 144, '2023-08-11 14:06:54', '2023-09-26 15:21:29', null); -INSERT INTO `dinky_sys_menu` VALUES (12, 3, '告警', '/registration/alert', null, 'registration:alert', 'AlertOutlined', 'M', 0, 43, '2023-08-11 14:06:54', '2023-09-26 15:01:32', null); -INSERT INTO `dinky_sys_menu` VALUES (13, 3, '文档', '/registration/document', './RegCenter/Document', 'registration:document', 'BookOutlined', 'C', 0, 55, '2023-08-11 14:06:54', '2023-09-26 15:03:59', null); -INSERT INTO `dinky_sys_menu` VALUES (14, 3, '全局变量', '/registration/fragment', './RegCenter/GlobalVar', 'registration:fragment', 'RocketOutlined', 'C', 0, 59, '2023-08-11 14:06:54', '2023-09-26 15:04:55', null); -INSERT INTO `dinky_sys_menu` VALUES (15, 3, 'Git 项目', '/registration/gitproject', './RegCenter/GitProject', 'registration:gitproject', 'GithubOutlined', 'C', 0, 63, '2023-08-11 14:06:54', '2023-09-26 15:05:37', null); -INSERT INTO `dinky_sys_menu` VALUES (16, 3, 'UDF 模版', '/registration/udf', './RegCenter/UDF', 'registration:udf', 'ToolOutlined', 'C', 0, 69, '2023-08-11 14:06:54', '2023-09-26 15:06:40', null); -INSERT INTO `dinky_sys_menu` VALUES (17, 2, 'job-detail', '/devops/job-detail', './DevOps/JobDetail', 'devops:job-detail', 'InfoCircleOutlined', 'C', 0, 22, '2023-08-11 14:06:54', '2023-09-26 14:53:53', null); -INSERT INTO `dinky_sys_menu` VALUES (18, 2, 'job', '/devops/joblist', './DevOps', 'devops:joblist', 'AppstoreFilled', 'C', 0, 21, '2023-08-11 14:06:54', '2023-09-26 14:53:43', null); -INSERT INTO `dinky_sys_menu` VALUES (19, 3, '资源中心', '/registration/resource', './RegCenter/Resource', 'registration:resource', 'FileZipOutlined', 'C', 0, 73, '2023-08-11 14:06:54', '2023-09-26 15:07:25', null); -INSERT INTO `dinky_sys_menu` VALUES (20, 4, '角色', '/auth/role', './AuthCenter/Role', 'auth:role', 
'TeamOutlined', 'C', 0, 88, '2023-08-11 14:06:54', '2023-09-26 15:10:19', null); -INSERT INTO `dinky_sys_menu` VALUES (21, 4, '用户', '/auth/user', './AuthCenter/User', 'auth:user', 'UserOutlined', 'C', 0, 80, '2023-08-11 14:06:54', '2023-09-26 15:08:51', null); -INSERT INTO `dinky_sys_menu` VALUES (22, 4, '菜单', '/auth/menu', './AuthCenter/Menu', 'auth:menu', 'MenuOutlined', 'C', 0, 94, '2023-08-11 14:06:54', '2023-09-26 15:11:34', null); -INSERT INTO `dinky_sys_menu` VALUES (23, 4, '租户', '/auth/tenant', './AuthCenter/Tenant', 'auth:tenant', 'SecurityScanOutlined', 'C', 0, 104, '2023-08-11 14:06:54', '2023-09-26 15:13:35', null); -INSERT INTO `dinky_sys_menu` VALUES (24, 6, '全局设置', '/settings/globalsetting', './SettingCenter/GlobalSetting', 'settings:globalsetting', 'SettingOutlined', 'C', 0, 116, '2023-08-11 14:06:54', '2023-09-26 15:16:12', null); -INSERT INTO `dinky_sys_menu` VALUES (25, 6, '系统日志', '/settings/systemlog', './SettingCenter/SystemLogs', 'settings:systemlog', 'InfoCircleOutlined', 'C', 0, 131, '2023-08-11 14:06:55', '2023-09-26 15:18:53', null); -INSERT INTO `dinky_sys_menu` VALUES (26, 6, '进程', '/settings/process', './SettingCenter/Process', 'settings:process', 'ReconciliationOutlined', 'C', 0, 135, '2023-08-11 14:06:55', '2023-09-26 15:19:35', null); -INSERT INTO `dinky_sys_menu` VALUES (27, 4, '行权限', '/auth/rowpermissions', './AuthCenter/RowPermissions', 'auth:rowpermissions', 'SafetyCertificateOutlined', 'C', 0, 100, '2023-08-11 14:06:55', '2023-09-26 15:12:46', null); -INSERT INTO `dinky_sys_menu` VALUES (28, 9, 'Flink 实例', '/registration/cluster/instance', './RegCenter/Cluster/Instance', 'registration:cluster:instance', 'ReconciliationOutlined', 'C', 0, 25, '2023-08-11 14:06:55', '2023-09-26 14:54:29', null); -INSERT INTO `dinky_sys_menu` VALUES (29, 12, '告警组', '/registration/alert/group', './RegCenter/Alert/AlertGroup', 'registration:alert:group', 'AlertOutlined', 'C', 0, 48, '2023-08-11 14:06:55', '2023-09-26 15:02:23', null); -INSERT INTO `dinky_sys_menu` VALUES (30, 9, '集群配置', '/registration/cluster/config', './RegCenter/Cluster/Configuration', 'registration:cluster:config', 'SettingOutlined', 'C', 0, 31, '2023-08-11 14:06:55', '2023-09-26 14:57:57', null); -INSERT INTO `dinky_sys_menu` VALUES (31, 12, '告警实例', '/registration/alert/instance', './RegCenter/Alert/AlertInstance', 'registration:alert:instance', 'AlertFilled', 'C', 0, 44, '2023-08-11 14:06:55', '2023-09-26 15:01:42', null); -INSERT INTO `dinky_sys_menu` VALUES (32, 1, '作业监控', '/home/jobOverView', 'JobOverView', 'home:jobOverView', 'AntCloudOutlined', 'F', 0, 2, '2023-08-15 16:52:59', '2023-09-26 14:48:50', null); -INSERT INTO `dinky_sys_menu` VALUES (33, 1, '数据开发', '/home/devOverView', 'DevOverView', 'home:devOverView', 'AimOutlined', 'F', 0, 3, '2023-08-15 16:54:47', '2023-09-26 14:49:00', null); -INSERT INTO `dinky_sys_menu` VALUES (34, 5, '项目列表', '/datastudio/left/project', null, 'datastudio:left:project', 'ConsoleSqlOutlined', 'F', 0, 5, '2023-09-01 18:00:39', '2023-09-26 14:49:31', null); -INSERT INTO `dinky_sys_menu` VALUES (35, 5, '数据源', '/datastudio/left/datasource', null, 'datastudio:left:datasource', 'TableOutlined', 'F', 0, 7, '2023-09-01 18:01:09', '2023-09-26 14:49:42', null); -INSERT INTO `dinky_sys_menu` VALUES (36, 5, 'catalog', '/datastudio/left/catalog', null, 'datastudio:left:structure', 'DatabaseOutlined', 'F', 0, 6, '2023-09-01 18:01:30', '2023-09-26 14:49:54', null); -INSERT INTO `dinky_sys_menu` VALUES (37, 5, '作业配置', '/datastudio/right/jobConfig', null, 
'datastudio:right:jobConfig', 'SettingOutlined', 'F', 0, 8, '2023-09-01 18:02:15', '2023-09-26 14:50:24', null); -INSERT INTO `dinky_sys_menu` VALUES (38, 5, '预览配置', '/datastudio/right/previewConfig', null, 'datastudio:right:previewConfig', 'InsertRowRightOutlined', 'F', 0, 9, '2023-09-01 18:03:08', '2023-09-26 14:50:54', null); -INSERT INTO `dinky_sys_menu` VALUES (39, 5, '版本历史', '/datastudio/right/historyVision', null, 'datastudio:right:historyVision', 'HistoryOutlined', 'F', 0, 10, '2023-09-01 18:03:29', '2023-09-26 14:51:03', null); -INSERT INTO `dinky_sys_menu` VALUES (40, 5, '保存点', '/datastudio/right/savePoint', null, 'datastudio:right:savePoint', 'FolderOutlined', 'F', 0, 11, '2023-09-01 18:03:58', '2023-09-26 14:51:13', null); -INSERT INTO `dinky_sys_menu` VALUES (41, 5, '作业信息', '/datastudio/right/jobInfo', null, 'datastudio:right:jobInfo', 'InfoCircleOutlined', 'F', 0, 8, '2023-09-01 18:04:31', '2023-09-25 18:26:45', null); -INSERT INTO `dinky_sys_menu` VALUES (42, 5, '控制台', '/datastudio/bottom/console', null, 'datastudio:bottom:console', 'ConsoleSqlOutlined', 'F', 0, 12, '2023-09-01 18:04:56', '2023-09-26 14:51:24', null); -INSERT INTO `dinky_sys_menu` VALUES (43, 5, '结果', '/datastudio/bottom/result', null, 'datastudio:bottom:result', 'SearchOutlined', 'F', 0, 13, '2023-09-01 18:05:16', '2023-09-26 14:51:36', null); -INSERT INTO `dinky_sys_menu` VALUES (44, 5, 'BI', '/datastudio/bottom/bi', null, 'datastudio:bottom:bi', 'DashboardOutlined', 'F', 0, 14, '2023-09-01 18:05:43', '2023-09-26 14:51:45', null); -INSERT INTO `dinky_sys_menu` VALUES (45, 5, '血缘', '/datastudio/bottom/lineage', null, 'datastudio:bottom:lineage', 'PushpinOutlined', 'F', 0, 15, '2023-09-01 18:07:15', '2023-09-26 14:52:00', null); -INSERT INTO `dinky_sys_menu` VALUES (46, 5, '表数据监控', '/datastudio/bottom/process', null, 'datastudio:bottom:process', 'TableOutlined', 'F', 0, 16, '2023-09-01 18:07:55', '2023-09-26 14:52:38', null); -INSERT INTO `dinky_sys_menu` VALUES (47, 5, '小工具', '/datastudio/bottom/tool', null, 'datastudio:bottom:tool', 'ToolOutlined', 'F', 0, 17, '2023-09-01 18:08:18', '2023-09-26 14:53:04', null); -INSERT INTO `dinky_sys_menu` VALUES (48, 28, '新建', '/registration/cluster/instance/add', null, 'registration:cluster:instance:add', 'PlusOutlined', 'F', 0, 26, '2023-09-06 08:56:45', '2023-09-26 14:56:54', null); -INSERT INTO `dinky_sys_menu` VALUES (49, 28, '回收', '/registration/cluster/instance/recovery', null, 'registration:cluster:instance:recovery', 'DeleteFilled', 'F', 0, 29, '2023-09-06 08:57:30', '2023-09-26 14:56:54', null); -INSERT INTO `dinky_sys_menu` VALUES (50, 28, '编辑', '/registration/cluster/instance/edit', null, 'registration:cluster:instance:edit', 'EditOutlined', 'F', 0, 27, '2023-09-06 08:56:45', '2023-09-26 14:56:54', null); -INSERT INTO `dinky_sys_menu` VALUES (51, 28, '删除', '/registration/cluster/instance/delete', null, 'registration:cluster:instance:delete', 'DeleteOutlined', 'F', 0, 28, '2023-09-06 08:57:30', '2023-09-26 14:56:54', null); -INSERT INTO `dinky_sys_menu` VALUES (52, 30, '新建', '/registration/cluster/config/add', null, 'registration:cluster:config:add', 'PlusOutlined', 'F', 0, 32, '2023-09-06 09:00:31', '2023-09-26 14:58:50', null); -INSERT INTO `dinky_sys_menu` VALUES (53, 30, '编辑', '/registration/cluster/config/edit', null, 'registration:cluster:config:edit', 'EditOutlined', 'F', 0, 33, '2023-09-06 08:56:45', '2023-09-26 14:58:50', null); -INSERT INTO `dinky_sys_menu` VALUES (54, 30, '删除', '/registration/cluster/config/delete', null, 
'registration:cluster:config:delete', 'DeleteOutlined', 'F', 0, 34, '2023-09-06 08:57:30', '2023-09-26 14:58:50', null); -INSERT INTO `dinky_sys_menu` VALUES (55, 10, '新建', '/registration/datasource/add', null, 'registration:datasource:add', 'PlusOutlined', 'F', 0, 38, '2023-09-06 09:01:05', '2023-09-26 15:00:42', null); -INSERT INTO `dinky_sys_menu` VALUES (56, 10, '编辑', '/registration/datasource/edit', null, 'registration:datasource:edit', 'EditOutlined', 'F', 0, 39, '2023-09-06 08:56:45', '2023-09-26 15:00:41', null); -INSERT INTO `dinky_sys_menu` VALUES (57, 10, '删除', '/registration/datasource/delete', null, 'registration:datasource:delete', 'DeleteOutlined', 'F', 0, 40, '2023-09-06 08:57:30', '2023-09-26 15:00:42', null); -INSERT INTO `dinky_sys_menu` VALUES (58, 31, '新建', '/registration/alert/instance/add', null, 'registration:alert:instance:add', 'PlusOutlined', 'F', 0, 46, '2023-09-06 09:01:05', '2023-09-26 15:02:04', null); -INSERT INTO `dinky_sys_menu` VALUES (59, 31, '编辑', '/registration/alert/instance/edit', null, 'registration:alert:instance:edit', 'EditOutlined', 'F', 0, 45, '2023-09-06 08:56:45', '2023-09-26 15:01:54', null); -INSERT INTO `dinky_sys_menu` VALUES (60, 31, '删除', '/registration/alert/instance/delete', null, 'registration:alert:instance:delete', 'DeleteOutlined', 'F', 0, 47, '2023-09-06 08:57:30', '2023-09-26 15:02:13', null); -INSERT INTO `dinky_sys_menu` VALUES (61, 29, '新建', '/registration/alert/group/add', null, 'registration:alert:group:add', 'PlusOutlined', 'F', 0, 49, '2023-09-06 09:01:05', '2023-09-26 15:02:48', null); -INSERT INTO `dinky_sys_menu` VALUES (62, 29, '编辑', '/registration/alert/group/edit', null, 'registration:alert:group:edit', 'EditOutlined', 'F', 0, 49, '2023-09-06 08:56:45', '2023-09-26 15:02:36', null); -INSERT INTO `dinky_sys_menu` VALUES (63, 29, '删除', '/registration/alert/group/delete', null, 'registration:alert:group:delete', 'DeleteOutlined', 'F', 0, 50, '2023-09-06 08:57:30', '2023-09-26 15:03:01', null); -INSERT INTO `dinky_sys_menu` VALUES (64, 13, '新建', '/registration/document/add', null, 'registration:document:add', 'PlusOutlined', 'F', 0, 57, '2023-09-06 09:01:05', '2023-09-26 15:04:22', null); -INSERT INTO `dinky_sys_menu` VALUES (65, 13, '编辑', '/registration/document/edit', null, 'registration:document:edit', 'EditOutlined', 'F', 0, 56, '2023-09-06 08:56:45', '2023-09-26 15:04:13', null); -INSERT INTO `dinky_sys_menu` VALUES (66, 13, '删除', '/registration/document/delete', null, 'registration:document:delete', 'DeleteOutlined', 'F', 0, 58, '2023-09-06 08:57:30', '2023-09-26 15:04:32', null); -INSERT INTO `dinky_sys_menu` VALUES (68, 14, '新建', '/registration/fragment/add', null, 'registration:fragment:add', 'PlusOutlined', 'F', 0, 61, '2023-09-06 09:01:05', '2023-09-26 15:05:13', null); -INSERT INTO `dinky_sys_menu` VALUES (69, 14, '编辑', '/registration/fragment/edit', null, 'registration:fragment:edit', 'EditOutlined', 'F', 0, 60, '2023-09-06 08:56:45', '2023-09-26 15:05:04', null); -INSERT INTO `dinky_sys_menu` VALUES (70, 14, '删除', '/registration/fragment/delete', null, 'registration:fragment:delete', 'DeleteOutlined', 'F', 0, 62, '2023-09-06 08:57:30', '2023-09-26 15:05:21', null); -INSERT INTO `dinky_sys_menu` VALUES (72, 15, '新建', '/registration/gitproject/add', null, 'registration:gitproject:add', 'PlusOutlined', 'F', 0, 65, '2023-09-06 09:01:05', '2023-09-26 15:06:01', null); -INSERT INTO `dinky_sys_menu` VALUES (73, 15, '编辑', '/registration/gitproject/edit', null, 'registration:gitproject:edit', 'EditOutlined', 'F', 
0, 64, '2023-09-06 08:56:45', '2023-09-26 15:05:52', null); -INSERT INTO `dinky_sys_menu` VALUES (74, 15, '删除', '/registration/gitproject/delete', null, 'registration:gitproject:delete', 'DeleteOutlined', 'F', 0, 66, '2023-09-06 08:57:30', '2023-09-26 15:06:09', null); -INSERT INTO `dinky_sys_menu` VALUES (76, 15, '构建', '/registration/gitproject/build', null, 'registration:gitproject:build', 'PlaySquareOutlined', 'F', 0, 67, '2023-09-06 08:57:30', '2023-09-26 15:06:17', null); -INSERT INTO `dinky_sys_menu` VALUES (77, 15, '查看日志', '/registration/gitproject/showLog', null, 'registration:gitproject:showLog', 'SearchOutlined', 'F', 0, 68, '2023-09-06 08:57:30', '2023-09-26 15:06:26', null); -INSERT INTO `dinky_sys_menu` VALUES (78, 16, '新建', '/registration/udf/template/add', null, 'registration:udf:template:add', 'PlusOutlined', 'F', 0, 71, '2023-09-06 09:01:05', '2023-09-26 15:07:04', null); -INSERT INTO `dinky_sys_menu` VALUES (79, 16, '编辑', '/registration/udf/template/edit', null, 'registration:udf:template:edit', 'EditOutlined', 'F', 0, 70, '2023-09-06 08:56:45', '2023-09-26 15:06:48', null); -INSERT INTO `dinky_sys_menu` VALUES (80, 16, '删除', '/registration/udf/template/delete', null, 'registration:udf:template:delete', 'DeleteOutlined', 'F', 0, 72, '2023-09-06 08:57:30', '2023-09-26 15:07:12', null); -INSERT INTO `dinky_sys_menu` VALUES (82, 19, '上传', '/registration/resource/upload', null, 'registration:resource:upload', 'PlusOutlined', 'F', 0, 77, '2023-09-06 09:01:05', '2023-09-26 15:08:02', null); -INSERT INTO `dinky_sys_menu` VALUES (83, 19, '重命名', '/registration/resource/rename', null, 'registration:resource:rename', 'EditOutlined', 'F', 0, 75, '2023-09-06 08:56:45', '2023-09-26 15:07:45', null); -INSERT INTO `dinky_sys_menu` VALUES (84, 19, '删除', '/registration/resource/delete', null, 'registration:resource:delete', 'DeleteOutlined', 'F', 0, 76, '2023-09-06 08:57:30', '2023-09-26 15:07:54', null); -INSERT INTO `dinky_sys_menu` VALUES (85, 19, '创建文件夹', '/registration/resource/addFolder', null, 'registration:resource:addFolder', 'PlusOutlined', 'F', 0, 74, '2023-09-06 08:57:30', '2023-09-26 15:07:37', null); -INSERT INTO `dinky_sys_menu` VALUES (86, 4, 'Token 令牌', '/auth/token', './AuthCenter/Token', 'auth:token', 'SecurityScanFilled', 'C', 0, 111, '2023-09-05 23:14:23', '2023-09-26 15:15:22', null); -INSERT INTO `dinky_sys_menu` VALUES (87, 21, '添加', '/auth/user/add', null, 'auth:user:add', 'PlusOutlined', 'F', 0, 81, '2023-09-22 22:06:52', '2023-09-26 15:09:49', null); -INSERT INTO `dinky_sys_menu` VALUES (88, 21, '重置密码', '/auth/user/reset', null, 'auth:user:reset', 'RollbackOutlined', 'F', 0, 84, '2023-09-22 22:08:17', '2023-09-26 15:09:49', null); -INSERT INTO `dinky_sys_menu` VALUES (89, 21, '恢复用户', '/auth/user/recovery', null, 'auth:user:recovery', 'RadiusSettingOutlined', 'F', 0, 85, '2023-09-22 22:08:53', '2023-09-26 15:09:49', null); -INSERT INTO `dinky_sys_menu` VALUES (90, 21, '删除', '/auth/user/delete', null, 'auth:user:delete', 'DeleteOutlined', 'F', 0, 83, '2023-09-22 22:09:29', '2023-09-26 15:09:49', null); -INSERT INTO `dinky_sys_menu` VALUES (91, 21, '修改密码', '/auth/user/changePassword', null, 'auth:user:changePassword', 'EditOutlined', 'F', 0, 86, '2023-09-22 22:10:01', '2023-09-26 15:09:49', null); -INSERT INTO `dinky_sys_menu` VALUES (92, 21, '分配角色', '/auth/user/assignRole', null, 'auth:user:assignRole', 'ForwardOutlined', 'F', 0, 87, '2023-09-22 22:10:31', '2023-09-26 15:09:49', null); -INSERT INTO `dinky_sys_menu` VALUES (93, 21, '编辑', '/auth/user/edit', null, 
'auth:user:edit', 'EditOutlined', 'F', 0, 82, '2023-09-22 22:11:41', '2023-09-26 15:09:49', null); -INSERT INTO `dinky_sys_menu` VALUES (94, 20, '添加', '/auth/role/add', null, 'auth:role:add', 'PlusOutlined', 'F', 0, 89, '2023-09-22 22:06:52', '2023-09-26 15:11:10', null); -INSERT INTO `dinky_sys_menu` VALUES (95, 20, '删除', '/auth/role/delete', null, 'auth:role:delete', 'DeleteOutlined', 'F', 0, 91, '2023-09-22 22:09:29', '2023-09-26 15:11:10', null); -INSERT INTO `dinky_sys_menu` VALUES (96, 20, '分配菜单', '/auth/role/assignMenu', null, 'auth:role:assignMenu', 'AntDesignOutlined', 'F', 0, 92, '2023-09-22 22:10:31', '2023-09-26 15:11:10', null); -INSERT INTO `dinky_sys_menu` VALUES (97, 20, '编辑', '/auth/role/edit', null, 'auth:role:edit', 'EditOutlined', 'F', 0, 90, '2023-09-22 22:11:41', '2023-09-26 15:11:10', null); -INSERT INTO `dinky_sys_menu` VALUES (98, 20, '查看用户列表', '/auth/role/viewUser', null, 'auth:role:viewUser', 'FundViewOutlined', 'F', 0, 93, '2023-09-22 22:11:41', '2023-09-26 15:11:10', null); -INSERT INTO `dinky_sys_menu` VALUES (99, 86, '添加 Token', '/auth/token/add', null, 'auth:token:add', 'PlusOutlined', 'F', 0, 112, '2023-09-22 22:11:41', '2023-09-26 15:15:46', null); -INSERT INTO `dinky_sys_menu` VALUES (100, 86, '删除 Token', '/auth/token/delete', null, 'auth:token:delete', 'DeleteOutlined', 'F', 0, 114, '2023-09-22 22:11:41', '2023-09-26 15:15:46', null); -INSERT INTO `dinky_sys_menu` VALUES (101, 86, '修改 Token', '/auth/token/edit', null, 'auth:token:edit', 'EditOutlined', 'F', 0, 113, '2023-09-22 22:11:41', '2023-09-26 15:15:46', null); -INSERT INTO `dinky_sys_menu` VALUES (102, 27, '添加', '/auth/rowPermissions/add', null, 'auth:rowPermissions:add', 'PlusOutlined', 'F', 0, 101, '2023-09-22 22:11:41', '2023-09-26 15:13:12', null); -INSERT INTO `dinky_sys_menu` VALUES (103, 27, '编辑', '/auth/rowPermissions/edit', null, 'auth:rowPermissions:edit', 'EditOutlined', 'F', 0, 102, '2023-09-22 22:11:41', '2023-09-26 15:13:12', null); -INSERT INTO `dinky_sys_menu` VALUES (104, 27, '删除', '/auth/rowPermissions/delete', null, 'auth:rowPermissions:delete', 'DeleteOutlined', 'F', 0, 103, '2023-09-22 22:11:41', '2023-09-26 15:13:12', null); -INSERT INTO `dinky_sys_menu` VALUES (105, 23, '添加', '/auth/tenant/add', null, 'auth:tenant:add', 'PlusOutlined', 'F', 0, 105, '2023-09-22 22:11:41', '2023-09-26 15:15:02', null); -INSERT INTO `dinky_sys_menu` VALUES (106, 23, '编辑', '/auth/tenant/edit', null, 'auth:tenant:edit', 'EditOutlined', 'F', 0, 106, '2023-09-22 22:11:41', '2023-09-26 15:15:02', null); -INSERT INTO `dinky_sys_menu` VALUES (107, 23, '删除', '/auth/tenant/delete', null, 'auth:tenant:delete', 'DeleteOutlined', 'F', 0, 107, '2023-09-22 22:11:41', '2023-09-26 15:15:02', null); -INSERT INTO `dinky_sys_menu` VALUES (108, 23, '分配用户', '/auth/tenant/assignUser', null, 'auth:tenant:assignUser', 'EuroOutlined', 'F', 0, 108, '2023-09-22 22:11:41', '2023-09-26 15:15:02', null); -INSERT INTO `dinky_sys_menu` VALUES (109, 23, '查看用户', '/auth/tenant/viewUser', null, 'auth:tenant:viewUser', 'FundViewOutlined', 'F', 0, 109, '2023-09-22 22:11:41', '2023-09-26 15:15:02', null); -INSERT INTO `dinky_sys_menu` VALUES (110, 23, '设置/取消租户管理员', '/auth/tenant/modifyTenantManager', null, 'auth:tenant:modifyTenantManager', 'ExclamationCircleOutlined', 'F', 0, 110, '2023-09-22 22:11:41', '2023-09-26 15:15:02', null); -INSERT INTO `dinky_sys_menu` VALUES (111, 22, '创建根菜单', '/auth/menu/createRoot', null, 'auth:menu:createRoot', 'FolderAddOutlined', 'F', 0, 95, '2023-09-22 22:11:41', '2023-09-26 15:12:26', null); 
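-- Illustrative sketch only, not part of the schema dump above: per the dinky_sys_menu DDL earlier
-- in this script, rows form a tree through parent_id (-1 marks a top-level entry) and `type`
-- distinguishes directories (M), pages (C) and buttons (F), with `perms` carrying the permission
-- key. Assuming the dinky_sys_role_menu mapping table defined further below, the button-level
-- permissions granted to a role could be listed as follows; role id 1 is a hypothetical value.
SELECT m.perms, m.name
FROM dinky_sys_role_menu rm
JOIN dinky_sys_menu m ON m.id = rm.menu_id
WHERE rm.role_id = 1 AND m.type = 'F'
ORDER BY m.order_num;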
-INSERT INTO `dinky_sys_menu` VALUES (112, 22, '刷新', '/auth/menu/refresh', null, 'auth:menu:refresh', 'ReloadOutlined', 'F', 0, 97, '2023-09-22 22:11:41', '2023-09-26 15:12:26', null); -INSERT INTO `dinky_sys_menu` VALUES (113, 22, '编辑', '/auth/menu/edit', null, 'auth:menu:edit', 'EditOutlined', 'F', 0, 98, '2023-09-22 22:11:41', '2023-09-26 15:12:26', null); -INSERT INTO `dinky_sys_menu` VALUES (114, 22, '添加子项', '/auth/menu/addSub', null, 'auth:menu:addSub', 'PlusOutlined', 'F', 0, 96, '2023-09-22 22:11:41', '2023-09-26 15:12:26', null); -INSERT INTO `dinky_sys_menu` VALUES (115, 22, '删除', '/auth/menu/delete', null, 'auth:menu:delete', 'DeleteOutlined', 'F', 0, 99, '2023-09-22 22:11:41', '2023-09-26 15:12:26', null); -INSERT INTO `dinky_sys_menu` VALUES (116, 6, '告警策略', '/settings/alertrule', './SettingCenter/AlertRule', 'settings:alertrule', 'AndroidOutlined', 'C', 0, 136, '2023-09-22 23:31:10', '2023-09-26 15:19:52', null); -INSERT INTO `dinky_sys_menu` VALUES (117, 116, '添加', '/settings/alertrule/add', null, 'settings:alertrule:add', 'PlusOutlined', 'F', 0, 137, '2023-09-22 23:34:51', '2023-09-26 15:20:03', null); -INSERT INTO `dinky_sys_menu` VALUES (118, 116, '删除', '/settings/alertrule/delete', null, 'settings:alertrule:delete', 'DeleteOutlined', 'F', 0, 139, '2023-09-22 23:35:20', '2023-09-26 15:20:21', null); -INSERT INTO `dinky_sys_menu` VALUES (119, 116, '编辑', '/settings/alertrule/edit', null, 'settings:alertrule:edit', 'EditOutlined', 'F', 0, 138, '2023-09-22 23:36:32', '2023-09-26 15:20:13', null); -INSERT INTO `dinky_sys_menu` VALUES (120, 8, 'Dinky 服务监控', '/metrics/server', './Metrics/Server', 'metrics:server', 'DashboardOutlined', 'F', 0, 141, '2023-09-22 23:37:43', '2023-09-26 15:21:00', null); -INSERT INTO `dinky_sys_menu` VALUES (121, 8, 'Flink 任务监控', '/metrics/job', './Metrics/Job', 'metrics:job', 'DashboardTwoTone', 'C', 0, 142, '2023-09-22 23:38:34', '2023-09-26 15:21:08', null); -INSERT INTO `dinky_sys_menu` VALUES (122, 24, 'Dinky 环境配置', '/settings/globalsetting/dinky', null, 'settings:globalsetting:dinky', 'SettingOutlined', 'C', 0, 117, '2023-09-22 23:40:30', '2023-09-26 15:16:20', null); -INSERT INTO `dinky_sys_menu` VALUES (123, 24, 'Flink 环境配置', '/settings/globalsetting/flink', null, 'settings:globalsetting:flink', 'SettingOutlined', 'C', 0, 119, '2023-09-22 23:40:30', '2023-09-26 15:16:40', null); -INSERT INTO `dinky_sys_menu` VALUES (124, 24, 'Maven 配置', '/settings/globalsetting/maven', null, 'settings:globalsetting:maven', 'SettingOutlined', 'C', 0, 121, '2023-09-22 23:40:30', '2023-09-26 15:17:04', null); -INSERT INTO `dinky_sys_menu` VALUES (125, 24, 'DolphinScheduler 配置', '/settings/globalsetting/ds', null, 'settings:globalsetting:ds', 'SettingOutlined', 'C', 0, 123, '2023-09-22 23:40:30', '2023-09-26 15:17:23', null); -INSERT INTO `dinky_sys_menu` VALUES (126, 24, 'LDAP 配置', '/settings/globalsetting/ldap', null, 'settings:globalsetting:ldap', 'SettingOutlined', 'C', 0, 125, '2023-09-22 23:40:30', '2023-09-26 15:17:41', null); -INSERT INTO `dinky_sys_menu` VALUES (127, 24, 'Metrics 配置', '/settings/globalsetting/metrics', null, 'settings:globalsetting:metrics', 'SettingOutlined', 'C', 0, 127, '2023-09-22 23:40:30', '2023-09-26 15:18:06', null); -INSERT INTO `dinky_sys_menu` VALUES (128, 24, 'Resource 配置', '/settings/globalsetting/resource', null, 'settings:globalsetting:resource', 'SettingOutlined', 'C', 0, 129, '2023-09-22 23:40:30', '2023-09-26 15:18:27', null); -INSERT INTO `dinky_sys_menu` VALUES (129, 122, '编辑', '/settings/globalsetting/dinky/edit', 
null, 'settings:globalsetting:dinky:edit', 'EditOutlined', 'F', 0, 118, '2023-09-22 23:44:18', '2023-09-26 15:16:29', null); -INSERT INTO `dinky_sys_menu` VALUES (130, 123, '编辑', '/settings/globalsetting/flink/edit', null, 'settings:globalsetting:flink:edit', 'EditOutlined', 'F', 0, 120, '2023-09-22 23:44:18', '2023-09-26 15:16:50', null); -INSERT INTO `dinky_sys_menu` VALUES (131, 124, '编辑', '/settings/globalsetting/maven/edit', null, 'settings:globalsetting:maven:edit', 'EditOutlined', 'F', 0, 122, '2023-09-22 23:44:18', '2023-09-26 15:17:13', null); -INSERT INTO `dinky_sys_menu` VALUES (132, 125, '编辑', '/settings/globalsetting/ds/edit', null, 'settings:globalsetting:ds:edit', 'EditOutlined', 'F', 0, 124, '2023-09-22 23:44:18', '2023-09-26 15:17:32', null); -INSERT INTO `dinky_sys_menu` VALUES (133, 126, '编辑', '/settings/globalsetting/ldap/edit', null, 'settings:globalsetting:ldap:edit', 'EditOutlined', 'F', 0, 126, '2023-09-22 23:44:18', '2023-09-26 15:17:51', null); -INSERT INTO `dinky_sys_menu` VALUES (134, 127, '编辑', '/settings/globalsetting/metrics/edit', null, 'settings:globalsetting:metrics:edit', 'EditOutlined', 'F', 0, 128, '2023-09-22 23:44:18', '2023-09-26 15:18:16', null); -INSERT INTO `dinky_sys_menu` VALUES (135, 128, '编辑', '/settings/globalsetting/resource/edit', null, 'settings:globalsetting:resource:edit', 'EditOutlined', 'F', 0, 130, '2023-09-22 23:44:18', '2023-09-26 15:18:39', null); -INSERT INTO `dinky_sys_menu` VALUES (136, 12, '告警模版', '/registration/alert/template', './RegCenter/Alert/AlertTemplate', 'registration:alert:template', 'AlertOutlined', 'C', 0, 51, '2023-09-23 21:34:43', '2023-09-26 15:03:14', null); -INSERT INTO `dinky_sys_menu` VALUES (137, 136, '添加', '/registration/alert/template/add', null, 'registration:alert:template:add', 'PlusOutlined', 'F', 0, 52, '2023-09-23 21:36:37', '2023-09-26 15:03:22', null); -INSERT INTO `dinky_sys_menu` VALUES (138, 136, '编辑', '/registration/alert/template/edit', null, 'registration:alert:template:edit', 'EditOutlined', 'F', 0, 53, '2023-09-23 21:37:00', '2023-09-26 15:03:30', null); -INSERT INTO `dinky_sys_menu` VALUES (139, 136, '删除', '/registration/alert/template/delete', null, 'registration:alert:template:delete', 'DeleteOutlined', 'F', 0, 54, '2023-09-23 21:37:43', '2023-09-26 15:03:37', null); -INSERT INTO `dinky_sys_menu` VALUES (140, 25, '系统日志', '/settings/systemlog/rootlog', null, 'settings:systemlog:rootlog', 'BankOutlined', 'F', 0, 133, '2023-09-23 21:43:57', '2023-09-26 15:19:14', null); -INSERT INTO `dinky_sys_menu` VALUES (141, 25, '日志列表', '/settings/systemlog/loglist', null, 'settings:systemlog:loglist', 'BankOutlined', 'F', 0, 134, '2023-09-23 21:45:05', '2023-09-26 15:19:23', null); -INSERT INTO `dinky_sys_menu` VALUES (142, 30, '部署 Session 集群', '/registration/cluster/config/deploy', null, 'registration:cluster:config:deploy', 'PlayCircleOutlined', 'F', 0, 35, '2023-09-26 13:42:55', '2023-09-26 14:58:50', null); -INSERT INTO `dinky_sys_menu` VALUES (143, 30, ' 心跳检测', '/registration/cluster/config/heartbeat', null, 'registration:cluster:config:heartbeat', 'HeartOutlined', 'F', 0, 36, '2023-09-26 13:44:23', '2023-09-26 14:58:50', null); -INSERT INTO `dinky_sys_menu` VALUES (144, 28, '心跳检测', '/registration/cluster/instance/heartbeat', null, 'registration:cluster:instance:heartbeat', 'HeartOutlined', 'F', 0, 30, '2023-09-26 13:51:04', '2023-09-26 14:57:42', null); -INSERT INTO `dinky_sys_menu` VALUES (145, 10, '心跳检测', '/registration/datasource/heartbeat', null, 'registration:datasource:heartbeat', 
'HeartOutlined', 'F', 0, 41, '2023-09-26 14:00:06', '2023-09-26 15:00:42', null); -INSERT INTO `dinky_sys_menu` VALUES (146, 10, ' 拷贝', '/registration/datasource/copy', null, 'registration:datasource:copy', 'CopyOutlined', 'F', 0, 42, '2023-09-26 14:02:28', '2023-09-26 15:00:41', null); - --- ---------------------------- --- Table structure dinky_sys_role_menu --- ---------------------------- -drop table if exists `dinky_sys_role_menu`; -CREATE TABLE `dinky_sys_role_menu` ( - `id` bigint NOT null AUTO_INCREMENT COMMENT 'id', - `role_id` bigint NOT null COMMENT 'role id', - `menu_id` bigint NOT null COMMENT 'menu id', - `create_time` datetime not null default current_timestamp comment 'create time', - `update_time` datetime not null default current_timestamp on update current_timestamp comment 'modify time', - PRIMARY KEY (`id`), - UNIQUE KEY `un_role_menu_inx` (`role_id`,`menu_id`) -) ENGINE=InnoDB ; - - - --- ---------------------------- --- Table structure dinky_sys_token --- ---------------------------- -drop table if exists `dinky_sys_token`; -CREATE TABLE `dinky_sys_token` ( - `id` bigint NOT NULL AUTO_INCREMENT COMMENT 'id', - `token_value` varchar(255) NOT NULL COMMENT 'token value', - `user_id` bigint NOT NULL COMMENT 'user id', - `role_id` bigint NOT NULL COMMENT 'role id', - `tenant_id` bigint NOT NULL COMMENT 'tenant id', - `expire_type` tinyint NOT NULL COMMENT '1: never expire, 2: expire after a period of time, 3: expire at a certain time', - `expire_start_time` datetime DEFAULT NULL COMMENT 'expire start time ,when expire_type = 3 , it is the start time of the period', - `expire_end_time` datetime DEFAULT NULL COMMENT 'expire end time ,when expire_type = 2,3 , it is the end time of the period', - `create_time` datetime NOT NULL COMMENT 'create time', - `update_time` datetime NOT NULL COMMENT 'modify time', - `creator` bigint DEFAULT NULL COMMENT '创建人', - `updator` bigint DEFAULT NULL COMMENT '修改人', - PRIMARY KEY (`id`) -) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8mb4 COMMENT='token management'; - - - --- ---------------------------- --- Table structure dinky_sys_alert --- ---------------------------- -drop table if exists `dinky_alert_template`; -create table if not exists dinky_alert_template -( - id int auto_increment - primary key COMMENT 'id', - name varchar(20) COMMENT 'template name', - template_content text null COMMENT 'template content', - enabled tinyint default 1 null COMMENT 'is enable', - create_time datetime null COMMENT 'create time', - update_time datetime null COMMENT 'update time' -); - -drop table if exists `dinky_alert_rules`; -create table if not exists dinky_alert_rules -( - id int auto_increment - primary key comment 'id', - name varchar(40) unique not null comment 'rule name', - rule text null comment 'specify rule', - template_id int null comment 'template id', - rule_type varchar(10) null comment 'alert rule type', - trigger_conditions varchar(20) null comment 'trigger conditions', - description text null comment 'description', - enabled tinyint default 1 null comment 'is enable', - create_time datetime null comment 'create time', - update_time datetime null comment 'update time' -); - - - --- ---------------------------- --- Records of dinky_alert_rule --- ---------------------------- -INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (3, 'alert.rule.jobFail', 
'[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"''FAILED''","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-04 23:03:02'); -INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (4, 'alert.rule.getJobInfoFail', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"''UNKNOWN''","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-05 18:03:43'); -INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (5, 'alert.rule.jobRestart', '[{"ruleKey":"jobInstance.status","ruleOperator":"EQ","ruleValue":"''RESTARTING''","rulePriority":"1"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:35:12'); -INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (6, 'alert.rule.checkpointFail', '[{"ruleKey":"checkpointRule.checkFailed(#key,#checkPoints)","ruleOperator":"EQ","ruleValue":"true"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:49:03'); -INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (7, 'alert.rule.jobRunException', '[{"ruleKey":"exceptionRule.isException(#key,#exceptions)","ruleOperator":"EQ","ruleValue":"true"}]', 1, 'SYSTEM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 21:50:12'); -INSERT INTO dinky_alert_rules (id, name, rule, template_id, rule_type, trigger_conditions, description, enabled, create_time, update_time) VALUES (8, 'alert.rule.checkpointTimeout', '[{"ruleKey":"checkpointRule.checkpointTime(#key,#checkPoints)","ruleOperator":"GE","ruleValue":"1000"}]', 1, 'CUSTOM', ' or ', '', 1, '1970-01-01 00:00:00', '2023-09-06 22:23:35'); - -INSERT INTO dinky_alert_template VALUES (1, 'Default', ' -- **Job Name :** ${task.name} -- **Job Status :** ${jobInstance.status} -- **Alert Time :** ${time} -- **Start Time :** ${startTime} -- **End Time :** ${endTime} -- **${(exceptions.rootException)?substring(0,20)}** -[Go toTask Web](http://${taskUrl}) -', 1, null, null); - -CREATE TABLE `dinky_udf_manage` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `name` varchar(50) DEFAULT NULL COMMENT 'udf name', - `class_name` varchar(50) DEFAULT NULL COMMENT 'Complete class name', - `task_id` int(11) DEFAULT NULL COMMENT 'task id', - `resources_id` int(11) DEFAULT NULL COMMENT 'resources id', - `enabled` tinyint(1) DEFAULT 1 COMMENT 'is enable', - `create_time` datetime DEFAULT NULL COMMENT 'create time', - `update_time` datetime DEFAULT NULL COMMENT 'update time' -) ENGINE = InnoDB ROW_FORMAT = DYNAMIC; From 7563410eb02be98faf4ed60ab6c1ac6496dda938 Mon Sep 17 00:00:00 2001 From: zhu-mingye <934230207@qq.com> Date: Tue, 7 Nov 2023 19:22:33 -0600 Subject: [PATCH 17/21] Fix udf bug (#2501) * Spotless Apply * Spotless Apply * fix-udf-bug * fix-udf-bug * modify git build model layout * fix table enum bug * Spotless Apply * modify some code * Spotless Apply * modify some code * modify git build model layout && udf bug * modify some code * modify some code * Spotless Apply --------- Co-authored-by: zhu-mingye --- .../java/org/dinky/data/model/GitProject.java | 5 + .../resource/impl/ResourceServiceImpl.java | 2 +- .../main/java/org/dinky/utils/MavenUtil.java | 29 +- .../java/org/dinky/utils/RuntimeUtils.java | 6 
+- .../resources/i18n/messages_en_US.properties | 2 +- .../resources/i18n/messages_zh_CN.properties | 2 +- dinky-web/src/locales/en-US/global.ts | 2 + dinky-web/src/locales/en-US/pages.ts | 3 +- dinky-web/src/locales/zh-CN/global.ts | 2 + dinky-web/src/locales/zh-CN/pages.ts | 2 +- .../Console/ConsoleContent.tsx | 2 +- .../SchemaDesc/ColumnInfo/Index.tsx | 142 ----- .../RightTagsRouter/SchemaDesc/index.tsx | 26 +- .../components/BuildSteps/index.tsx | 80 ++- .../components/ProjectProTable/index.tsx | 50 +- .../pages/RegCenter/GitProject/constans.tsx | 67 ++- .../src/pages/RegCenter/GitProject/data.d.tsx | 28 +- .../components/ResourceOverView/index.tsx | 507 +++++++++--------- .../UDF/components/UDFRegister/index.tsx | 4 + script/sql/dinky-mysql.sql | 2 +- script/sql/dinky-pg.sql | 12 + .../1.0.0-SNAPSHOT_schema/mysql/dinky_ddl.sql | 2 +- 22 files changed, 483 insertions(+), 494 deletions(-) delete mode 100644 dinky-web/src/pages/RegCenter/DataSource/components/DataSourceDetail/RightTagsRouter/SchemaDesc/ColumnInfo/Index.tsx diff --git a/dinky-admin/src/main/java/org/dinky/data/model/GitProject.java b/dinky-admin/src/main/java/org/dinky/data/model/GitProject.java index dde18c8624..d026490332 100644 --- a/dinky-admin/src/main/java/org/dinky/data/model/GitProject.java +++ b/dinky-admin/src/main/java/org/dinky/data/model/GitProject.java @@ -98,6 +98,11 @@ public class GitProject extends SuperEntity { @TableField(value = "build_state") private Integer buildState; + /** + * 区别于 java 和 Python 类型 | different from java and python; + * 1. 构建 java 工程时: 步骤值映射如下: 0: 环境检查 1: 克隆项目 2: 编译构建 3: 获取产物 4: 分析 UDF 5: 完成; (when build java project, the step value is as follows: 0: environment check 1: clone project 2: compile and build 3: get artifact 4: analyze UDF 5: finish) + * 2. 
构建 python 工程时: 步骤值映射如下: 0: 环境检查 1: 克隆项目 2: 获取产物 3: 分析 UDF 4: 完成;(when build python project, the step value is as follows: 0: environment check 1: clone project 2: get artifact 3: analyze UDF 4: finish) + */ @ApiModelProperty(value = "Build Step", dataType = "Integer") @TableField(value = "build_step") private Integer buildStep; diff --git a/dinky-admin/src/main/java/org/dinky/service/resource/impl/ResourceServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/resource/impl/ResourceServiceImpl.java index 2043142561..6edab021fa 100644 --- a/dinky-admin/src/main/java/org/dinky/service/resource/impl/ResourceServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/resource/impl/ResourceServiceImpl.java @@ -169,7 +169,7 @@ public File getFile(Integer id) { Resources resources = getById(id); Assert.notNull(resources, () -> new BusException(Status.RESOURCE_DIR_OR_FILE_NOT_EXIST)); Assert.isFalse(resources.getSize() > ALLOW_MAX_CAT_CONTENT_SIZE, () -> new BusException("file is too large!")); - return URLUtils.toFile("rs:" + resources.getFullName()); + return URLUtils.toFile("rs://" + resources.getFullName()); } @Transactional(rollbackFor = Exception.class) diff --git a/dinky-admin/src/main/java/org/dinky/utils/MavenUtil.java b/dinky-admin/src/main/java/org/dinky/utils/MavenUtil.java index 63a0289f5f..eb41b3741c 100644 --- a/dinky-admin/src/main/java/org/dinky/utils/MavenUtil.java +++ b/dinky-admin/src/main/java/org/dinky/utils/MavenUtil.java @@ -93,7 +93,8 @@ public static boolean build( } else { localRepositoryDirectory = repositoryDir; } - String mavenCommandLine = getMavenCommandLine(pom, mavenHome, localRepositoryDirectory, setting, goals, args); + String mavenCommandLine = + getMavenCommandLineByMvn(pom, mavenHome, localRepositoryDirectory, setting, goals, args); Opt.ofNullable(consumer).ifPresent(c -> c.accept("Executing command: " + mavenCommandLine + "\n")); int waitValue = RuntimeUtils.run( @@ -102,7 +103,7 @@ public static boolean build( s = DateUtil.date().toMsStr() + " - " + s + "\n"; consumer.accept(s); }, - log::error); + consumer::accept); return waitValue == 0; } @@ -149,6 +150,30 @@ public static String getMavenCommandLine( return StrUtil.join(" ", commandLine); } + public static String getMavenCommandLineByMvn( + String projectDir, + String mavenHome, + String repositoryDir, + String settingsPath, + List goals, + List args) { + projectDir = StrUtil.wrap(projectDir, "\""); + settingsPath = StrUtil.wrap(settingsPath, "\""); + List commandLine = new LinkedList<>(); + + commandLine.add(mavenHome + "/bin/mvn"); + commandLine.add("-Dmaven.multiModuleProjectDirectory=" + projectDir); + commandLine.add("-Dmaven.home=" + StrUtil.wrap(mavenHome, "\"")); + Opt.ofBlankAble(repositoryDir) + .ifPresent(x -> commandLine.add("-Dmaven.repo.local=" + StrUtil.wrap(repositoryDir, "\""))); + commandLine.add("-Dclassworlds.conf=" + StrUtil.wrap(mavenHome + "/bin/m2.conf", "\"")); + commandLine.add("-s " + settingsPath); + commandLine.add("-f " + projectDir); + commandLine.add(StrUtil.join(" ", args)); + commandLine.add(StrUtil.join(" ", goals)); + return StrUtil.join(" ", commandLine); + } + public static String getMavenVersion() { return RuntimeUtil.execForStr(getMavenHome() + "/bin/" + EXECTOR + " -v"); } diff --git a/dinky-admin/src/main/java/org/dinky/utils/RuntimeUtils.java b/dinky-admin/src/main/java/org/dinky/utils/RuntimeUtils.java index c30f28b356..87f4ebeeaf 100644 --- a/dinky-admin/src/main/java/org/dinky/utils/RuntimeUtils.java +++ 
b/dinky-admin/src/main/java/org/dinky/utils/RuntimeUtils.java @@ -29,6 +29,7 @@ import java.util.function.Consumer; import java.util.stream.Collectors; +import cn.hutool.core.exceptions.ExceptionUtil; import cn.hutool.core.lang.Opt; import lombok.extern.slf4j.Slf4j; @@ -61,7 +62,8 @@ public static int run(String shell, Consumer outputConsumer, Consumer outputConsumer, Consumer x.accept(errMsg)); } } catch (IOException | InterruptedException e) { - e.printStackTrace(); + errorConsumer.accept(ExceptionUtil.stacktraceToOneLineString(e)); } return waitValue; } diff --git a/dinky-common/src/main/resources/i18n/messages_en_US.properties b/dinky-common/src/main/resources/i18n/messages_en_US.properties index 272a21ef52..d03793581e 100644 --- a/dinky-common/src/main/resources/i18n/messages_en_US.properties +++ b/dinky-common/src/main/resources/i18n/messages_en_US.properties @@ -11,7 +11,7 @@ ds.work.flow.not.save=Please Save Workflow First schedule.status.unknown=Unknown Status: {0} user.binding.role.delete.all=User Binding Role Delete All modify.failed=Update Failed -git.build.success=Git Build Success +git.build.success=Pre-update status success, start executing the build process menu.has.child=Menu Has Child, Can Not Delete tenant.already.exists=Tenant Already Exists save.failed=Save Failed diff --git a/dinky-common/src/main/resources/i18n/messages_zh_CN.properties b/dinky-common/src/main/resources/i18n/messages_zh_CN.properties index 850611025b..24b5b2b71d 100644 --- a/dinky-common/src/main/resources/i18n/messages_zh_CN.properties +++ b/dinky-common/src/main/resources/i18n/messages_zh_CN.properties @@ -11,7 +11,7 @@ ds.work.flow.not.save=\u8BF7\u5148\u4FDD\u5B58\u5DE5\u4F5C\u6D41 schedule.status.unknown=\u672A\u77E5\u72B6\u6001: {0} user.binding.role.delete.all=\u7528\u6237\u7ED1\u5B9A\u89D2\u8272\u5220\u9664\u6240\u6709 modify.failed=\u4FEE\u6539\u5931\u8D25 -git.build.success=\u6784\u5EFA\u6210\u529F +git.build.success=\u9884\u66F4\u65B0\u72B6\u6001\u6210\u529F,\u5F00\u59CB\u6267\u884C\u6784\u5EFA\u6D41\u7A0B menu.has.child=\u5B58\u5728\u5B50\u83DC\u5355 \u4E0D\u5141\u8BB8\u5220\u9664 tenant.already.exists=\u79DF\u6237\u5DF2\u5B58\u5728 save.failed=\u4FDD\u5B58\u5931\u8D25 diff --git a/dinky-web/src/locales/en-US/global.ts b/dinky-web/src/locales/en-US/global.ts index 061b06cf36..14d660283c 100644 --- a/dinky-web/src/locales/en-US/global.ts +++ b/dinky-web/src/locales/en-US/global.ts @@ -24,6 +24,8 @@ export default { 'button.recovery': 'Recovery', 'button.cancel': 'Cancel', 'button.finish': 'Finish', + 'button.retry': 'Retry', + 'button.rebuild': 'Re Build', 'button.back': 'Back', 'button.save': 'Save', 'button.delete': 'Delete', diff --git a/dinky-web/src/locales/en-US/pages.ts b/dinky-web/src/locales/en-US/pages.ts index 8528d0b6b8..2dabb560b2 100644 --- a/dinky-web/src/locales/en-US/pages.ts +++ b/dinky-web/src/locales/en-US/pages.ts @@ -760,7 +760,8 @@ export default { 'rc.gp.build.step.1': 'Check Env', 'rc.gp.build.step.2': 'Git Clone', 'rc.gp.build.step.3': 'Maven Build', - 'rc.gp.build.step.4': 'Get Jars', + // 获取产物 + 'rc.gp.build.step.4': 'Get Artifact', 'rc.gp.build.step.5': 'Analysis UDF', 'rc.gp.build.step.6': 'Finish', 'rc.gp.buildArgs': 'Build Arguments', diff --git a/dinky-web/src/locales/zh-CN/global.ts b/dinky-web/src/locales/zh-CN/global.ts index 331eaa0e46..ada4aa2e27 100644 --- a/dinky-web/src/locales/zh-CN/global.ts +++ b/dinky-web/src/locales/zh-CN/global.ts @@ -26,6 +26,8 @@ export default { 'button.confirm': '确定', 'button.cancel': '取消', 'button.finish': '完成', + 
'button.retry': '重试', + 'button.rebuild': '重新构建', 'button.back': '返回', 'button.save': '保存', 'button.delete': '删除', diff --git a/dinky-web/src/locales/zh-CN/pages.ts b/dinky-web/src/locales/zh-CN/pages.ts index 3c33e9ed72..16528c23cd 100644 --- a/dinky-web/src/locales/zh-CN/pages.ts +++ b/dinky-web/src/locales/zh-CN/pages.ts @@ -726,7 +726,7 @@ export default { 'rc.gp.build.step.1': '环境检查', 'rc.gp.build.step.2': '克隆项目', 'rc.gp.build.step.3': '编译构建', - 'rc.gp.build.step.4': '获取 Jar 包', + 'rc.gp.build.step.4': '获取产物', 'rc.gp.build.step.5': '分析 UDF', 'rc.gp.build.step.6': '完成', 'rc.gp.buildArgs': '构建参数', diff --git a/dinky-web/src/pages/DataStudio/BottomContainer/Console/ConsoleContent.tsx b/dinky-web/src/pages/DataStudio/BottomContainer/Console/ConsoleContent.tsx index 6a2104351e..6ae7e22fa1 100644 --- a/dinky-web/src/pages/DataStudio/BottomContainer/Console/ConsoleContent.tsx +++ b/dinky-web/src/pages/DataStudio/BottomContainer/Console/ConsoleContent.tsx @@ -163,7 +163,7 @@ const ConsoleContent = (props: ConsoleProps) => { onSelect={onSelect} treeData={[processNode]} expandedKeys={expandedKeys} - expandAction={"doubleClick"} + expandAction={'doubleClick'} onExpand={handleExpand} /> ) : ( diff --git a/dinky-web/src/pages/RegCenter/DataSource/components/DataSourceDetail/RightTagsRouter/SchemaDesc/ColumnInfo/Index.tsx b/dinky-web/src/pages/RegCenter/DataSource/components/DataSourceDetail/RightTagsRouter/SchemaDesc/ColumnInfo/Index.tsx deleted file mode 100644 index 2094995ee5..0000000000 --- a/dinky-web/src/pages/RegCenter/DataSource/components/DataSourceDetail/RightTagsRouter/SchemaDesc/ColumnInfo/Index.tsx +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import { DataSources } from '@/types/RegCenter/data'; -import { transformTreeData } from '@/utils/function'; -import { l } from '@/utils/intl'; -import { CheckSquareOutlined, KeyOutlined } from '@ant-design/icons'; -import { ProTable } from '@ant-design/pro-components'; -import { ProColumns } from '@ant-design/pro-table/es/typing'; -import React from 'react'; - -type ColumnInfoProps = { - columnInfo: Partial; -}; - -const ColumnInfo: React.FC = (props) => { - const { columnInfo } = props; - - const columns: ProColumns[] = [ - { - title: l('rc.ds.no'), - dataIndex: 'position', - width: '4%' - }, - { - title: l('rc.ds.columnName'), - dataIndex: 'name', - width: '10%', - ellipsis: true - }, - { - title: l('rc.ds.columnType'), - dataIndex: 'type', - width: '6%' - }, - { - title: l('rc.ds.primarykey'), - dataIndex: 'keyFlag', - width: '4%', - render: (_, record) => { - return record.keyFlag ? : undefined; - } - }, - { - title: l('rc.ds.autoIncrement'), - dataIndex: 'autoIncrement', - width: '4%', - render: (_, record) => { - return record.autoIncrement ? 
( - - ) : undefined; - } - }, - { - title: l('rc.ds.isNull'), - dataIndex: 'nullable', - width: '4%', - render: (_, record) => { - return !record.nullable ? : undefined; - } - }, - { - title: l('rc.ds.default'), - dataIndex: 'defaultValue', - ellipsis: true, - width: '8%' - }, - { - title: l('rc.ds.length'), - dataIndex: 'length', - width: '4%' - }, - { - title: l('rc.ds.precision'), - dataIndex: 'precision', - width: '4%' - }, - { - title: l('rc.ds.decimalDigits'), - dataIndex: 'scale', - ellipsis: true, - width: '6%' - }, - { - title: l('rc.ds.character'), - dataIndex: 'characterSet', - width: '6%', - ellipsis: true - }, - { - title: l('rc.ds.collationRule'), - dataIndex: 'collation', - width: '10%', - ellipsis: true - }, - { - title: l('rc.ds.javaType'), - dataIndex: 'javaType', - ellipsis: true, - width: '8%' - }, - { - title: l('rc.ds.comment'), - dataIndex: 'comment', - ellipsis: true - } - ]; - - return ( - <> - - toolBarRender={false} - pagination={{ - defaultPageSize: 14, - hideOnSinglePage: true - }} - search={false} - options={false} - size={'small'} - bordered - columns={columns} - dataSource={transformTreeData(columnInfo) as DataSources.Column[]} - /> - - ); -}; - -export default ColumnInfo; diff --git a/dinky-web/src/pages/RegCenter/DataSource/components/DataSourceDetail/RightTagsRouter/SchemaDesc/index.tsx b/dinky-web/src/pages/RegCenter/DataSource/components/DataSourceDetail/RightTagsRouter/SchemaDesc/index.tsx index aeaefd9c66..bca42d43eb 100644 --- a/dinky-web/src/pages/RegCenter/DataSource/components/DataSourceDetail/RightTagsRouter/SchemaDesc/index.tsx +++ b/dinky-web/src/pages/RegCenter/DataSource/components/DataSourceDetail/RightTagsRouter/SchemaDesc/index.tsx @@ -1,18 +1,20 @@ /* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
*/ import { Height80VHDiv } from '@/components/StyledComponents'; diff --git a/dinky-web/src/pages/RegCenter/GitProject/components/BuildSteps/index.tsx b/dinky-web/src/pages/RegCenter/GitProject/components/BuildSteps/index.tsx index a0284a630d..7809dd7180 100644 --- a/dinky-web/src/pages/RegCenter/GitProject/components/BuildSteps/index.tsx +++ b/dinky-web/src/pages/RegCenter/GitProject/components/BuildSteps/index.tsx @@ -1,19 +1,19 @@ /* * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
* */ @@ -29,15 +29,19 @@ import { API_CONSTANTS } from '@/services/endpoints'; import { GitProject } from '@/types/RegCenter/data.d'; import { InitGitBuildStepsState } from '@/types/RegCenter/init.d'; import { GitBuildStepsState } from '@/types/RegCenter/state.d'; -import { Modal } from 'antd'; +import { l } from '@/utils/intl'; +import { Button, Modal } from 'antd'; import React, { useEffect, useState } from 'react'; /** * props */ type BuildStepsProps = { - onCancel: (flag?: boolean) => void; + onOk: (flag?: boolean) => void; + onReTry?: () => void; + onRebuild: () => void; title: string; + showLog?: boolean; values: Partial; }; @@ -45,7 +49,7 @@ export const BuildSteps: React.FC = (props) => { /** * extract props */ - const { onCancel: handleModalVisible, title, values } = props; + const { onOk: handleModalVisible, onReTry, showLog = false, onRebuild, title, values } = props; // todo: refactor this const [buildStepState, setBuildStepState] = useState(InitGitBuildStepsState); @@ -88,7 +92,7 @@ export const BuildSteps: React.FC = (props) => { // type // 1是总状态 2是log 3是部分状态 // status // 0是失败 1是进行中 2 完成 let result = JSON.parse(e.data); - const { currentStep, type, data, status, history } = result; + const { currentStep, type, data, status } = result; lastStep = currentStep; if (type === 0) { @@ -171,18 +175,46 @@ export const BuildSteps: React.FC = (props) => { handleModalVisible(); }; + // todo: 重试需要实现在不关闭弹窗的情况下,重新构建, 目前是关闭弹窗,重新打开,重新构建 + const handleReTry = () => { + onReTry?.(); + }; + + const footerButtons = [ + , + , + + ]; + /** * render */ return ( - handleCancel()} - okButtonProps={{ style: { display: 'none' } }} - > + { title: l('rc.gp.buildStep'), dataIndex: 'buildStep', hideInSearch: true, - filters: GIT_PROJECT_BUILD_STEP, - valueEnum: GIT_PROJECT_BUILD_STEP_ENUM + // filters: GIT_PROJECT_BUILD_STEP, + valueEnum: (row) => + row.codeType === 1 ? GIT_PROJECT_BUILD_STEP_JAVA_ENUM : GIT_PROJECT_BUILD_STEP_PYTHON_ENUM }, { title: l('rc.gp.buildState'), @@ -352,6 +353,15 @@ const ProjectProTable: React.FC = () => { return ; }; + /** + * re try build + * @param value + */ + const handleReTryBuild = async (value: Partial) => { + handleCancel(); + await handleBuild(value); + }; + /** * render */ @@ -401,7 +411,9 @@ const ProjectProTable: React.FC = () => { {gitProjectStatus.buildOpen && ( handleReTryBuild(gitProjectStatus.value)} + onReTry={() => handleReTryBuild(gitProjectStatus.value)} values={gitProjectStatus.value} /> )} @@ -410,7 +422,9 @@ const ProjectProTable: React.FC = () => { {gitProjectStatus.logOpen && ( handleReTryBuild(gitProjectStatus.value)} values={gitProjectStatus.value} /> )} diff --git a/dinky-web/src/pages/RegCenter/GitProject/constans.tsx b/dinky-web/src/pages/RegCenter/GitProject/constans.tsx index 81f901ea18..5d08f90b78 100644 --- a/dinky-web/src/pages/RegCenter/GitProject/constans.tsx +++ b/dinky-web/src/pages/RegCenter/GitProject/constans.tsx @@ -1,19 +1,19 @@ /* * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. * */ @@ -131,7 +131,7 @@ export const CLONE_TYPES = [ * @param item */ export const renderBranchesTagColor = (item: string) => { - let colorTag = item.includes('dev') + return item.includes('dev') ? 'processing' : item.includes('test') ? 'warning' @@ -142,13 +142,14 @@ export const renderBranchesTagColor = (item: string) => { : item.includes('main') ? 'success' : 'default'; - return colorTag; }; /** - * git project build step enum + * 区别于 java 和 Python 类型 | different from java and python + * 构建 java 工程时: 步骤值映射如下: 0: 环境检查 1: 克隆项目 2: 编译构建 3: 获取产物 4: 分析 UDF 5: 完成; (when build java project, the step value is as follows: 0: environment check 1: clone project 2: compile and build 3: get artifact 4: analyze UDF 5: finish) + * 构建 python 工程时: 步骤值映射如下: 0: 环境检查 1: 克隆项目 2: 获取产物 3: 分析 UDF 4: 完成;(when build python project, the step value is as follows: 0: environment check 1: clone project 2: get artifact 3: analyze UDF 4: finish) */ -export const GIT_PROJECT_BUILD_STEP_ENUM = { +const GIT_PROJECT_BUILD_STEP_BASE = { 0: { title: l('rc.gp.build.step.0'), text: l('rc.gp.build.step.0'), @@ -158,7 +159,13 @@ export const GIT_PROJECT_BUILD_STEP_ENUM = { title: l('rc.gp.build.step.1'), text: l('rc.gp.build.step.1'), status: 'default' - }, + } +}; +/** + * git project build step enum + */ +export const GIT_PROJECT_BUILD_STEP_JAVA_ENUM = { + ...GIT_PROJECT_BUILD_STEP_BASE, 2: { title: l('rc.gp.build.step.2'), text: l('rc.gp.build.step.2'), @@ -185,6 +192,31 @@ export const GIT_PROJECT_BUILD_STEP_ENUM = { status: 'success' } }; + +export const GIT_PROJECT_BUILD_STEP_PYTHON_ENUM = { + ...GIT_PROJECT_BUILD_STEP_BASE, + 2: { + title: l('rc.gp.build.step.2'), + text: l('rc.gp.build.step.2'), + status: 'processing' + }, + 3: { + title: l('rc.gp.build.step.4'), + text: l('rc.gp.build.step.4'), + status: 'success' + }, + 4: { + title: l('rc.gp.build.step.5'), + text: l('rc.gp.build.step.5'), + status: 'success' + }, + 5: { + title: l('rc.gp.build.step.6'), + text: l('rc.gp.build.step.6'), + status: 'success' + } +}; + /** * git project build step filter */ @@ -228,7 +260,6 @@ export const GIT_PROJECT_BUILD_STEP = [ /** * git project build step enum - * @type {{"100%": string, "0%": string, "20%": string, "40%": string, "60%": string, "80%": string}} */ export const processColor = { '0%': '#8ac1ea', diff --git a/dinky-web/src/pages/RegCenter/GitProject/data.d.tsx b/dinky-web/src/pages/RegCenter/GitProject/data.d.tsx index 4c38d7af06..925a944cd7 100644 --- 
a/dinky-web/src/pages/RegCenter/GitProject/data.d.tsx +++ b/dinky-web/src/pages/RegCenter/GitProject/data.d.tsx @@ -1,24 +1,26 @@ /* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. */ export type BuildStepsState = { key: number; title: string; - status: string; + status: string; // 0-notStart(未开始) 1-process(进行中/构建中) 2-failed(构建失败) 3-success(构建成功) description: string; disabled: boolean; onClick: () => void; diff --git a/dinky-web/src/pages/RegCenter/Resource/components/ResourceOverView/index.tsx b/dinky-web/src/pages/RegCenter/Resource/components/ResourceOverView/index.tsx index e788ec9fc0..332f1685f6 100644 --- a/dinky-web/src/pages/RegCenter/Resource/components/ResourceOverView/index.tsx +++ b/dinky-web/src/pages/RegCenter/Resource/components/ResourceOverView/index.tsx @@ -18,286 +18,283 @@ */ import RightContextMenu from '@/components/RightContextMenu'; -import {AuthorizedObject, useAccess} from '@/hooks/useAccess'; +import { AuthorizedObject, useAccess } from '@/hooks/useAccess'; import { - RIGHT_CONTEXT_FILE_MENU, - RIGHT_CONTEXT_FOLDER_MENU + RIGHT_CONTEXT_FILE_MENU, + RIGHT_CONTEXT_FOLDER_MENU } from '@/pages/RegCenter/Resource/components/constants'; import FileShow from '@/pages/RegCenter/Resource/components/FileShow'; import FileTree from '@/pages/RegCenter/Resource/components/FileTree'; import ResourceModal from '@/pages/RegCenter/Resource/components/ResourceModal'; import ResourcesUploadModal from '@/pages/RegCenter/Resource/components/ResourcesUploadModal'; -import {handleOption, handleRemoveById, queryDataByParams} from '@/services/BusinessCrud'; -import {API_CONSTANTS} from '@/services/endpoints'; -import {ResourceInfo} from '@/types/RegCenter/data'; -import {InitResourceState} from '@/types/RegCenter/init.d'; -import {ResourceState} from '@/types/RegCenter/state.d'; -import {l} from '@/utils/intl'; -import {ProCard} from '@ant-design/pro-components'; -import {MenuInfo} from 'rc-menu/es/interface'; -import {Resizable} from 're-resizable'; -import React, 
{useCallback, useState} from 'react'; -import {useAsyncEffect} from "ahooks"; -import {unSupportView} from "@/utils/function"; +import { handleOption, handleRemoveById, queryDataByParams } from '@/services/BusinessCrud'; +import { API_CONSTANTS } from '@/services/endpoints'; +import { ResourceInfo } from '@/types/RegCenter/data'; +import { InitResourceState } from '@/types/RegCenter/init.d'; +import { ResourceState } from '@/types/RegCenter/state.d'; +import { unSupportView } from '@/utils/function'; +import { l } from '@/utils/intl'; +import { ProCard } from '@ant-design/pro-components'; +import { useAsyncEffect } from 'ahooks'; +import { MenuInfo } from 'rc-menu/es/interface'; +import { Resizable } from 're-resizable'; +import React, { useCallback, useState } from 'react'; const ResourceOverView: React.FC = () => { - const [resourceState, setResourceState] = useState(InitResourceState); + const [resourceState, setResourceState] = useState(InitResourceState); - const [editModal, setEditModal] = useState(''); + const [editModal, setEditModal] = useState(''); - const [uploadValue] = useState({ - url: API_CONSTANTS.RESOURCE_UPLOAD, - pid: '', - description: '' - }); + const [uploadValue] = useState({ + url: API_CONSTANTS.RESOURCE_UPLOAD, + pid: '', + description: '' + }); - const refreshTree = async () => { - await queryDataByParams(API_CONSTANTS.RESOURCE_SHOW_TREE).then((res) => - setResourceState((prevState) => ({...prevState, treeData: res ?? []})) - ); - }; - - useAsyncEffect(async () => { - await refreshTree(); - }, []) - - /** - * query content by id - * @type {(id: number) => Promise} - */ - const queryContent: (id: number) => Promise = useCallback( - async (id: number) => { - await queryDataByParams(API_CONSTANTS.RESOURCE_GET_CONTENT_BY_ID, { - id - }).then((res) => setResourceState((prevState) => ({...prevState, content: res ?? ''}))); - }, - [] + const refreshTree = async () => { + await queryDataByParams(API_CONSTANTS.RESOURCE_SHOW_TREE).then((res) => + setResourceState((prevState) => ({ ...prevState, treeData: res ?? [] })) ); + }; - /** - * the node click event - * @param info - * @returns {Promise} - */ - const handleNodeClick = async (info: any): Promise => { - const { - node: {id, isLeaf, key, name}, - node - } = info; - setResourceState((prevState) => ({...prevState, selectedKeys: [key], clickedNode: node})); - if (isLeaf && !unSupportView(name)) { - await queryContent(id); - } else { - setResourceState((prevState) => ({...prevState, content: ''})); - } - }; + useAsyncEffect(async () => { + await refreshTree(); + }, []); - /** - * the node right click event OF upload, - */ - const handleCreateFolder = () => { - if (resourceState.rightClickedNode) { - setEditModal('createFolder'); - const {id} = resourceState.rightClickedNode; - setResourceState((prevState) => ({ - ...prevState, - editOpen: true, - value: {id, fileName: '', description: ''}, - contextMenuOpen: false - })); - } - }; - const handleUpload = () => { - if (resourceState.rightClickedNode) { - uploadValue.pid = resourceState.rightClickedNode.id; - // todo: upload - setResourceState((prevState) => ({...prevState, uploadOpen: true, contextMenuOpen: false})); - } - }; + /** + * query content by id + * @type {(id: number) => Promise} + */ + const queryContent: (id: number) => Promise = useCallback(async (id: number) => { + await queryDataByParams(API_CONSTANTS.RESOURCE_GET_CONTENT_BY_ID, { + id + }).then((res) => setResourceState((prevState) => ({ ...prevState, content: res ?? 
'' }))); + }, []); - /** - * the node right click event OF delete, - */ - const handleDelete = async () => { - if (resourceState.rightClickedNode) { - setResourceState((prevState) => ({...prevState, contextMenuOpen: false})); - await handleRemoveById(API_CONSTANTS.RESOURCE_REMOVE, resourceState.rightClickedNode.id); - await refreshTree(); - } - }; + /** + * the node click event + * @param info + * @returns {Promise} + */ + const handleNodeClick = async (info: any): Promise => { + const { + node: { id, isLeaf, key, name }, + node + } = info; + setResourceState((prevState) => ({ ...prevState, selectedKeys: [key], clickedNode: node })); + if (isLeaf && !unSupportView(name)) { + await queryContent(id); + } else { + setResourceState((prevState) => ({ ...prevState, content: '' })); + } + }; - /** - * the node right click event OF rename, - */ - const handleRename = () => { - if (resourceState.rightClickedNode) { - setEditModal('rename'); - const {id, name, desc} = resourceState.rightClickedNode; - setResourceState((prevState) => ({ - ...prevState, - editOpen: true, - value: {id, fileName: name, description: desc}, - contextMenuOpen: false - })); - } - }; + /** + * the node right click event OF upload, + */ + const handleCreateFolder = () => { + if (resourceState.rightClickedNode) { + setEditModal('createFolder'); + const { id } = resourceState.rightClickedNode; + setResourceState((prevState) => ({ + ...prevState, + editOpen: true, + value: { id, fileName: '', description: '' }, + contextMenuOpen: false + })); + } + }; + const handleUpload = () => { + if (resourceState.rightClickedNode) { + uploadValue.pid = resourceState.rightClickedNode.id; + // todo: upload + setResourceState((prevState) => ({ ...prevState, uploadOpen: true, contextMenuOpen: false })); + } + }; - const handleMenuClick = async (node: MenuInfo) => { - switch (node.key) { - case 'createFolder': - handleCreateFolder(); - break; - case 'upload': - handleUpload(); - break; - case 'delete': - await handleDelete(); - break; - case 'rename': - handleRename(); - break; - default: - break; - } - }; + /** + * the node right click event OF delete, + */ + const handleDelete = async () => { + if (resourceState.rightClickedNode) { + setResourceState((prevState) => ({ ...prevState, contextMenuOpen: false })); + await handleRemoveById(API_CONSTANTS.RESOURCE_REMOVE, resourceState.rightClickedNode.id); + await refreshTree(); + } + }; - /** - * the right click event - * @param info - */ - const handleRightClick = (info: any) => { - // 获取右键点击的节点信息 - const {node, event} = info; - console.log('node', node); - setResourceState((prevState) => ({ - ...prevState, - selectedKeys: [node.key], - rightClickedNode: node, - contextMenuOpen: true, - contextMenuPosition: { - ...prevState.contextMenuPosition, - left: event.clientX + 20, - top: event.clientY + 20 - } - })); - }; + /** + * the node right click event OF rename, + */ + const handleRename = () => { + if (resourceState.rightClickedNode) { + setEditModal('rename'); + const { id, name, desc } = resourceState.rightClickedNode; + setResourceState((prevState) => ({ + ...prevState, + editOpen: true, + value: { id, fileName: name, description: desc }, + contextMenuOpen: false + })); + } + }; - /** - * the rename cancel - */ - const handleModalCancel = async () => { - setResourceState((prevState) => ({...prevState, editOpen: false})); - await refreshTree(); - }; + const handleMenuClick = async (node: MenuInfo) => { + switch (node.key) { + case 'createFolder': + handleCreateFolder(); + break; + case 'upload': + 
handleUpload(); + break; + case 'delete': + await handleDelete(); + break; + case 'rename': + handleRename(); + break; + default: + break; + } + }; - /** - * the rename ok - */ - const handleModalSubmit = async (value: Partial) => { - const {id: pid} = resourceState.rightClickedNode; - if (editModal === 'createFolder') { - await handleOption(API_CONSTANTS.RESOURCE_CREATE_FOLDER, l('right.menu.createFolder'), { - ...value, - pid - }); - setResourceState((prevState) => ({...prevState, editOpen: false})); - } else if (editModal === 'rename') { - await handleOption(API_CONSTANTS.RESOURCE_RENAME, l('right.menu.rename'), {...value, pid}); - } - }; - const handleUploadCancel = async () => { - setResourceState((prevState) => ({...prevState, uploadOpen: false})); - await refreshTree(); - }; + /** + * the right click event + * @param info + */ + const handleRightClick = (info: any) => { + // 获取右键点击的节点信息 + const { node, event } = info; + console.log('node', node); + setResourceState((prevState) => ({ + ...prevState, + selectedKeys: [node.key], + rightClickedNode: node, + contextMenuOpen: true, + contextMenuPosition: { + ...prevState.contextMenuPosition, + left: event.clientX + 20, + top: event.clientY + 20 + } + })); + }; - /** - * the content change - * @param value - */ - const handleContentChange = (value: any) => { - setResourceState((prevState) => ({...prevState, content: value})); - // todo: save content - }; + /** + * the rename cancel + */ + const handleModalCancel = async () => { + setResourceState((prevState) => ({ ...prevState, editOpen: false })); + await refreshTree(); + }; - const access = useAccess(); + /** + * the rename ok + */ + const handleModalSubmit = async (value: Partial) => { + const { id: pid } = resourceState.rightClickedNode; + if (editModal === 'createFolder') { + await handleOption(API_CONSTANTS.RESOURCE_CREATE_FOLDER, l('right.menu.createFolder'), { + ...value, + pid + }); + setResourceState((prevState) => ({ ...prevState, editOpen: false })); + } else if (editModal === 'rename') { + await handleOption(API_CONSTANTS.RESOURCE_RENAME, l('right.menu.rename'), { ...value, pid }); + } + }; + const handleUploadCancel = async () => { + setResourceState((prevState) => ({ ...prevState, uploadOpen: false })); + await refreshTree(); + }; - const renderRightMenu = () => { - if (!resourceState.rightClickedNode.isLeaf) { - return RIGHT_CONTEXT_FOLDER_MENU.filter( - (menu) => !menu.path || !!AuthorizedObject({path: menu.path, children: menu, access}) - ); - } - return RIGHT_CONTEXT_FILE_MENU.filter( - (menu) => !menu.path || !!AuthorizedObject({path: menu.path, children: menu, access}) - ); - }; + /** + * the content change + * @param value + */ + const handleContentChange = (value: any) => { + setResourceState((prevState) => ({ ...prevState, content: value })); + // todo: save content + }; - /** - * render - */ - return ( - <> - - - - handleNodeClick(info)} - /> - - setResourceState((prevState) => ({...prevState, contextMenuOpen: false})) - } - items={renderRightMenu()} - onClick={handleMenuClick} - /> - - - - - - - - {resourceState.editOpen && ( - - )} - {resourceState.uploadOpen && ( - - )} - + const access = useAccess(); + + const renderRightMenu = () => { + if (!resourceState.rightClickedNode.isLeaf) { + return RIGHT_CONTEXT_FOLDER_MENU.filter( + (menu) => !menu.path || !!AuthorizedObject({ path: menu.path, children: menu, access }) + ); + } + return RIGHT_CONTEXT_FILE_MENU.filter( + (menu) => !menu.path || !!AuthorizedObject({ path: menu.path, children: menu, access }) ); + }; + 
+ /** + * render + */ + return ( + <> + + + + handleNodeClick(info)} + /> + + setResourceState((prevState) => ({ ...prevState, contextMenuOpen: false })) + } + items={renderRightMenu()} + onClick={handleMenuClick} + /> + + + + + + + + {resourceState.editOpen && ( + + )} + {resourceState.uploadOpen && ( + + )} + + ); }; export default ResourceOverView; diff --git a/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/index.tsx b/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/index.tsx index e32b6c7e00..f7ee8d736c 100644 --- a/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/index.tsx +++ b/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/index.tsx @@ -19,11 +19,13 @@ import { EditBtn } from '@/components/CallBackButton/EditBtn'; import { PopconfirmDeleteBtn } from '@/components/CallBackButton/PopconfirmDeleteBtn'; +import { BackIcon } from '@/components/Icons/CustomIcons'; import UDFRegisterModal from '@/pages/RegCenter/UDF/components/UDFRegister/UDFRegisterModal'; import { API_CONSTANTS } from '@/services/endpoints'; import { UDFRegisterInfo, UDFRegisterInfoParent } from '@/types/RegCenter/data'; import { l } from '@/utils/intl'; import { useRequest } from '@@/plugin-request'; +import { SaveTwoTone } from '@ant-design/icons'; import { ProColumns } from '@ant-design/pro-components'; import ProTable, { ActionType } from '@ant-design/pro-table'; import React, { Key, useEffect, useRef, useState } from 'react'; @@ -182,6 +184,8 @@ const UDFRegister: React.FC = (props) => { editable={{ deleteText: false, type: 'single', + saveText: , + cancelText: , editableKeys: udfRegisterState.editableKeys, onChange: editableKeysChange, onSave: async (_, row) => handleOnSave(row), diff --git a/script/sql/dinky-mysql.sql b/script/sql/dinky-mysql.sql index fa7dd2f6ed..581b430c61 100644 --- a/script/sql/dinky-mysql.sql +++ b/script/sql/dinky-mysql.sql @@ -1550,7 +1550,7 @@ CREATE TABLE `dinky_git_project` ( `last_build` datetime DEFAULT NULL, `description` varchar(255) DEFAULT NULL, `build_state` tinyint(2) NOT NULL DEFAULT '0' COMMENT '0-notStart 1-process 2-failed 3-success', - `build_step` tinyint(2) NOT NULL DEFAULT '0', + `build_step` tinyint(2) NOT NULL DEFAULT '0' COMMENT 'different from java and python, when build java project, the step value is as follows: 0: environment check 1: clone project 2: compile and build 3: get artifact 4: analyze UDF 5: finish; when build python project, the step value is as follows: 0: environment check 1: clone project 2: get artifact 3: analyze UDF 4: finish', `enabled` tinyint(1) NOT NULL DEFAULT '1' COMMENT '0-disable 1-enable', `udf_class_map_list` text COMMENT 'scan udf class', `order_line` int(11) NOT NULL DEFAULT '1' COMMENT 'order', diff --git a/script/sql/dinky-pg.sql b/script/sql/dinky-pg.sql index a1e277e0c7..7d7bd15134 100644 --- a/script/sql/dinky-pg.sql +++ b/script/sql/dinky-pg.sql @@ -1204,10 +1204,22 @@ CREATE TABLE "public"."dinky_git_project" ( "update_time" timestamp(6) NOT null ) ; +COMMENT ON COLUMN "public"."dinky_git_project"."id" IS 'ID'; +COMMENT ON COLUMN "public"."dinky_git_project"."tenant_id" IS 'tenant id'; +COMMENT ON COLUMN "public"."dinky_git_project"."name" IS 'project name'; +COMMENT ON COLUMN "public"."dinky_git_project"."url" IS 'git url'; +COMMENT ON COLUMN "public"."dinky_git_project"."branch" IS 'git branch'; +COMMENT ON COLUMN "public"."dinky_git_project"."username" IS 'username'; +COMMENT ON COLUMN "public"."dinky_git_project"."password" IS 'password'; +COMMENT ON COLUMN 
"public"."dinky_git_project"."pom" IS 'pom'; +COMMENT ON COLUMN "public"."dinky_git_project"."build_args" IS 'build args'; COMMENT ON COLUMN "public"."dinky_git_project"."private_key" IS 'keypath'; COMMENT ON COLUMN "public"."dinky_git_project"."code_type" IS 'code type(1-java,2-python)'; COMMENT ON COLUMN "public"."dinky_git_project"."type" IS '1-http ,2-ssh'; +COMMENT ON COLUMN "public"."dinky_git_project"."last_build" IS 'last build time'; +COMMENT ON COLUMN "public"."dinky_git_project"."description" IS 'description'; COMMENT ON COLUMN "public"."dinky_git_project"."build_state" IS '0-notStart 1-process 2-failed 3-success'; +COMMENT ON COLUMN "public"."dinky_git_project"."build_step" IS 'different from java and python, when build java project, the step value is as follows: 0: environment check 1: clone project 2: compile and build 3: get artifact 4: analyze UDF 5: finish; when build python project, the step value is as follows: 0: environment check 1: clone project 2: get artifact 3: analyze UDF 4: finish'; COMMENT ON COLUMN "public"."dinky_git_project"."enabled" IS '0-disable 1-enable'; COMMENT ON COLUMN "public"."dinky_git_project"."udf_class_map_list" IS 'scan udf class'; COMMENT ON COLUMN "public"."dinky_git_project"."order_line" IS 'order'; diff --git a/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_ddl.sql b/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_ddl.sql index 56f0c3987b..f9f90b0ade 100644 --- a/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_ddl.sql +++ b/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_ddl.sql @@ -97,7 +97,7 @@ CREATE TABLE `dinky_git_project` ( `last_build` datetime DEFAULT NULL, `description` varchar(255) DEFAULT NULL, `build_state` tinyint(2) NOT NULL DEFAULT '0' COMMENT '0-notStart 1-process 2-failed 3-success', - `build_step` tinyint(2) NOT NULL DEFAULT '0', + `build_step` tinyint(2) NOT NULL DEFAULT '0' COMMENT 'different from java and python, when build java project, the step value is as follows: 0: environment check 1: clone project 2: compile and build 3: get artifact 4: analyze UDF 5: finish; when build python project, the step value is as follows: 0: environment check 1: clone project 2: get artifact 3: analyze UDF 4: finish', `enabled` tinyint(1) NOT NULL DEFAULT '1' COMMENT '0-disable 1-enable', `udf_class_map_list` text COMMENT 'scan udf class', `order_line` int(11) NOT NULL DEFAULT '1' COMMENT 'order', From 0fa9655e007ed456262a6d4ba494348338b28aaf Mon Sep 17 00:00:00 2001 From: ZackYoung Date: Wed, 8 Nov 2023 09:22:54 +0800 Subject: [PATCH 18/21] [Refactor]Refactor javabean postion (#2507) * refactor_javabean_postion * fix_udf_python * spotless --- dinky-admin/pom.xml | 32 ---------- .../main/java/org/dinky/aop/LogAspect.java | 4 +- .../dinky/configure/MybatisPlusConfig.java | 8 +-- .../schedule/metrics/GatherSysIndicator.java | 2 +- .../org/dinky/controller/APIController.java | 2 +- .../controller/AlertGroupController.java | 2 +- .../controller/AlertHistoryController.java | 2 +- .../controller/AlertInstanceController.java | 2 +- .../dinky/controller/AlertRuleController.java | 2 +- .../controller/AlertTemplateController.java | 2 +- .../dinky/controller/CatalogueController.java | 2 +- .../ClusterConfigurationController.java | 2 +- .../controller/ClusterInstanceController.java | 2 +- .../dinky/controller/DataBaseController.java | 2 +- .../dinky/controller/DocumentController.java | 2 +- .../dinky/controller/DownloadController.java | 2 +- .../dinky/controller/FragmentController.java | 2 +- 
.../org/dinky/controller/GitController.java | 2 +- .../controller/JobInstanceController.java | 2 +- .../org/dinky/controller/LdapController.java | 2 +- .../org/dinky/controller/MenuController.java | 2 +- .../dinky/controller/MonitorController.java | 2 +- .../dinky/controller/ResourceController.java | 2 +- .../org/dinky/controller/RoleController.java | 2 +- .../dinky/controller/RoleMenuController.java | 2 +- .../controller/RowPermissionsController.java | 2 +- .../dinky/controller/StudioController.java | 2 +- .../dinky/controller/SysConfigController.java | 2 +- .../org/dinky/controller/TaskController.java | 2 +- .../controller/TaskVersionController.java | 2 +- .../dinky/controller/TenantController.java | 2 +- .../org/dinky/controller/TokenController.java | 2 +- .../controller/UDFTemplateController.java | 2 +- .../org/dinky/controller/UserController.java | 2 +- .../annotation/ConditionalOnListProperty.java | 58 ------------------- .../java/org/dinky/data/model/AlertRule.java | 2 +- .../java/org/dinky/service/task/BaseTask.java | 2 +- .../org/dinky/service/task/CommonSqlTask.java | 2 +- .../dinky/service/task/FlinkJarSqlTask.java | 2 +- .../org/dinky/service/task/FlinkSqlTask.java | 2 +- .../java/org/dinky/service/task/UdfTask.java | 2 +- dinky-common/pom.xml | 27 +++++++++ .../org/dinky/data/annotations}/GaugeM.java | 2 +- .../java/org/dinky/data/annotations}/Log.java | 8 +-- .../data/annotations}/SupportDialect.java | 8 +-- .../org/dinky/data/constant/BaseConstant.java | 0 .../org/dinky/data/constant/DirConstant.java | 18 +++--- .../data/constant/PermissionConstants.java | 0 .../org/dinky/data/enums/BusinessStatus.java | 0 .../org/dinky/data/enums/BusinessType.java | 0 .../java/org/dinky/data/enums/CodeEnum.java | 0 .../java/org/dinky/data/enums/MenuType.java | 0 .../org/dinky/data/enums/MetricsType.java | 0 .../java/org/dinky/data/enums/UserType.java | 0 .../dinky/data/exception/AuthException.java | 0 .../dinky/data/exception/BusException.java | 0 .../NotSupportExplainExcepition.java | 0 .../data/exception/SqlExplainExcepition.java | 0 .../data/exception/TaskNotDoneException.java | 0 .../org/dinky/data/metrics/BaseMetrics.java | 0 .../main/java/org/dinky/data/metrics/Cpu.java | 2 +- .../main/java/org/dinky/data/metrics/Jvm.java | 2 +- .../main/java/org/dinky/data/metrics/Mem.java | 2 +- .../org/dinky/data/metrics/MetricsTotal.java | 0 pom.xml | 1 + 65 files changed, 89 insertions(+), 157 deletions(-) delete mode 100644 dinky-admin/src/main/java/org/dinky/data/annotation/ConditionalOnListProperty.java rename {dinky-admin/src/main/java/org/dinky/data/annotation => dinky-common/src/main/java/org/dinky/data/annotations}/GaugeM.java (97%) rename {dinky-admin/src/main/java/org/dinky/data/annotation => dinky-common/src/main/java/org/dinky/data/annotations}/Log.java (87%) rename {dinky-admin/src/main/java/org/dinky/data/annotation => dinky-common/src/main/java/org/dinky/data/annotations}/SupportDialect.java (91%) rename {dinky-admin => dinky-common}/src/main/java/org/dinky/data/constant/BaseConstant.java (100%) rename {dinky-admin => dinky-common}/src/main/java/org/dinky/data/constant/DirConstant.java (66%) rename {dinky-admin => dinky-common}/src/main/java/org/dinky/data/constant/PermissionConstants.java (100%) rename {dinky-admin => dinky-common}/src/main/java/org/dinky/data/enums/BusinessStatus.java (100%) rename {dinky-admin => dinky-common}/src/main/java/org/dinky/data/enums/BusinessType.java (100%) rename {dinky-admin => dinky-common}/src/main/java/org/dinky/data/enums/CodeEnum.java (100%) 
rename {dinky-admin => dinky-common}/src/main/java/org/dinky/data/enums/MenuType.java (100%) rename {dinky-admin => dinky-common}/src/main/java/org/dinky/data/enums/MetricsType.java (100%) rename {dinky-admin => dinky-common}/src/main/java/org/dinky/data/enums/UserType.java (100%) rename {dinky-admin => dinky-common}/src/main/java/org/dinky/data/exception/AuthException.java (100%) rename {dinky-admin => dinky-common}/src/main/java/org/dinky/data/exception/BusException.java (100%) rename {dinky-admin => dinky-common}/src/main/java/org/dinky/data/exception/NotSupportExplainExcepition.java (100%) rename {dinky-admin => dinky-common}/src/main/java/org/dinky/data/exception/SqlExplainExcepition.java (100%) rename {dinky-admin => dinky-common}/src/main/java/org/dinky/data/exception/TaskNotDoneException.java (100%) rename {dinky-admin => dinky-common}/src/main/java/org/dinky/data/metrics/BaseMetrics.java (100%) rename {dinky-admin => dinky-common}/src/main/java/org/dinky/data/metrics/Cpu.java (97%) rename {dinky-admin => dinky-common}/src/main/java/org/dinky/data/metrics/Jvm.java (98%) rename {dinky-admin => dinky-common}/src/main/java/org/dinky/data/metrics/Mem.java (97%) rename {dinky-admin => dinky-common}/src/main/java/org/dinky/data/metrics/MetricsTotal.java (100%) diff --git a/dinky-admin/pom.xml b/dinky-admin/pom.xml index 17cfbdf6f5..7fd1b2f957 100644 --- a/dinky-admin/pom.xml +++ b/dinky-admin/pom.xml @@ -32,7 +32,6 @@ provided - 5.13.1.202206130422-r 42.5.1 @@ -175,10 +174,6 @@ org.springframework.boot spring-boot-starter-data-redis - - org.apache.commons - commons-lang3 - org.dinky dinky-core @@ -298,28 +293,6 @@ junit-jupiter test - - org.eclipse.jgit - org.eclipse.jgit.archive - ${jgit.version} - - - org.slf4j - slf4j-api - - - - - org.eclipse.jgit - org.eclipse.jgit.ssh.jsch - ${jgit.version} - - - org.slf4j - slf4j-api - - - org.springframework.boot spring-boot-starter-data-ldap @@ -339,11 +312,6 @@ - - - com.github.oshi - oshi-core - org.apache.paimon paimon-bundle diff --git a/dinky-admin/src/main/java/org/dinky/aop/LogAspect.java b/dinky-admin/src/main/java/org/dinky/aop/LogAspect.java index a394f88fac..6a3ce0c72e 100644 --- a/dinky-admin/src/main/java/org/dinky/aop/LogAspect.java +++ b/dinky-admin/src/main/java/org/dinky/aop/LogAspect.java @@ -20,7 +20,7 @@ package org.dinky.aop; import org.dinky.context.UserInfoContextHolder; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.enums.BusinessStatus; import org.dinky.data.model.OperateLog; import org.dinky.data.model.User; @@ -63,7 +63,7 @@ @Component public class LogAspect { - @Pointcut("@annotation(org.dinky.data.annotation.Log)") + @Pointcut("@annotation(org.dinky.data.annotations.Log)") public void logPointCut() {} /** diff --git a/dinky-admin/src/main/java/org/dinky/configure/MybatisPlusConfig.java b/dinky-admin/src/main/java/org/dinky/configure/MybatisPlusConfig.java index 38bd99c86f..96ae498282 100644 --- a/dinky-admin/src/main/java/org/dinky/configure/MybatisPlusConfig.java +++ b/dinky-admin/src/main/java/org/dinky/configure/MybatisPlusConfig.java @@ -20,7 +20,6 @@ package org.dinky.configure; import org.dinky.context.TenantContextHolder; -import org.dinky.data.annotation.ConditionalOnListProperty; import org.dinky.interceptor.PostgreSQLPrepareInterceptor; import org.dinky.interceptor.PostgreSQLQueryInterceptor; import org.dinky.mybatis.handler.DateMetaObjectHandler; @@ -34,6 +33,7 @@ import org.springframework.boot.context.properties.EnableConfigurationProperties; import 
org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Profile; import com.baomidou.mybatisplus.core.handlers.MetaObjectHandler; import com.baomidou.mybatisplus.extension.plugins.MybatisPlusInterceptor; @@ -80,8 +80,7 @@ public class MybatisPlusConfig { "dinky_task_version"); @Bean - // @ConditionalOnProperty(name = "spring.profiles.active", havingValue = "pgsql , jmx") - @ConditionalOnListProperty(name = "spring.profiles.active", havingValue = "pgsql") + @Profile("pgsql") public PostgreSQLQueryInterceptor postgreSQLQueryInterceptor() { return new PostgreSQLQueryInterceptor(); } @@ -92,8 +91,7 @@ public PostgreSQLQueryInterceptor postgreSQLQueryInterceptor() { * @return {@linkplain PostgreSQLPrepareInterceptor} */ @Bean - // @ConditionalOnProperty(name = "spring.profiles.active", havingValue = "pgsql , jmx") - @ConditionalOnListProperty(name = "spring.profiles.active", havingValue = "pgsql") + @Profile("pgsql") public PostgreSQLPrepareInterceptor postgreSQLPrepareInterceptor() { return new PostgreSQLPrepareInterceptor(); } diff --git a/dinky-admin/src/main/java/org/dinky/configure/schedule/metrics/GatherSysIndicator.java b/dinky-admin/src/main/java/org/dinky/configure/schedule/metrics/GatherSysIndicator.java index 9392ea8e12..f10a965dea 100644 --- a/dinky-admin/src/main/java/org/dinky/configure/schedule/metrics/GatherSysIndicator.java +++ b/dinky-admin/src/main/java/org/dinky/configure/schedule/metrics/GatherSysIndicator.java @@ -21,7 +21,7 @@ import org.dinky.configure.schedule.BaseSchedule; import org.dinky.context.MetricsContextHolder; -import org.dinky.data.annotation.GaugeM; +import org.dinky.data.annotations.GaugeM; import org.dinky.data.enums.MetricsType; import org.dinky.data.metrics.BaseMetrics; import org.dinky.data.metrics.Cpu; diff --git a/dinky-admin/src/main/java/org/dinky/controller/APIController.java b/dinky-admin/src/main/java/org/dinky/controller/APIController.java index 702085469e..4c9cf5e28a 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/APIController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/APIController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.dto.TaskDTO; import org.dinky.data.enums.BusinessType; import org.dinky.data.enums.Status; diff --git a/dinky-admin/src/main/java/org/dinky/controller/AlertGroupController.java b/dinky-admin/src/main/java/org/dinky/controller/AlertGroupController.java index 16a13d6989..57030ed9f4 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/AlertGroupController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/AlertGroupController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.constant.PermissionConstants; import org.dinky.data.enums.BusinessType; import org.dinky.data.enums.Status; diff --git a/dinky-admin/src/main/java/org/dinky/controller/AlertHistoryController.java b/dinky-admin/src/main/java/org/dinky/controller/AlertHistoryController.java index 442a68f17b..da8465b093 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/AlertHistoryController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/AlertHistoryController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import 
org.dinky.data.enums.BusinessType; import org.dinky.data.enums.Status; import org.dinky.data.model.AlertHistory; diff --git a/dinky-admin/src/main/java/org/dinky/controller/AlertInstanceController.java b/dinky-admin/src/main/java/org/dinky/controller/AlertInstanceController.java index 2ffaef9ff7..bb2e27b2c5 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/AlertInstanceController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/AlertInstanceController.java @@ -21,7 +21,7 @@ import org.dinky.alert.AlertPool; import org.dinky.alert.AlertResult; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.constant.PermissionConstants; import org.dinky.data.dto.AlertInstanceDTO; import org.dinky.data.enums.BusinessType; diff --git a/dinky-admin/src/main/java/org/dinky/controller/AlertRuleController.java b/dinky-admin/src/main/java/org/dinky/controller/AlertRuleController.java index 65fe1b3e82..9c7777a200 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/AlertRuleController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/AlertRuleController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.constant.PermissionConstants; import org.dinky.data.enums.BusinessType; import org.dinky.data.enums.Status; diff --git a/dinky-admin/src/main/java/org/dinky/controller/AlertTemplateController.java b/dinky-admin/src/main/java/org/dinky/controller/AlertTemplateController.java index 701a572b7c..d76f3e110a 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/AlertTemplateController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/AlertTemplateController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.constant.PermissionConstants; import org.dinky.data.enums.BusinessType; import org.dinky.data.enums.Status; diff --git a/dinky-admin/src/main/java/org/dinky/controller/CatalogueController.java b/dinky-admin/src/main/java/org/dinky/controller/CatalogueController.java index f921ea57b7..c0170eb1e3 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/CatalogueController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/CatalogueController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.dto.CatalogueTaskDTO; import org.dinky.data.enums.BusinessType; import org.dinky.data.enums.Status; diff --git a/dinky-admin/src/main/java/org/dinky/controller/ClusterConfigurationController.java b/dinky-admin/src/main/java/org/dinky/controller/ClusterConfigurationController.java index b8adec68ab..d6ea31c9ce 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/ClusterConfigurationController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/ClusterConfigurationController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.constant.PermissionConstants; import org.dinky.data.dto.ClusterConfigurationDTO; import org.dinky.data.enums.BusinessType; diff --git a/dinky-admin/src/main/java/org/dinky/controller/ClusterInstanceController.java b/dinky-admin/src/main/java/org/dinky/controller/ClusterInstanceController.java index 9d76a170c1..b92f4a4229 100644 --- 
a/dinky-admin/src/main/java/org/dinky/controller/ClusterInstanceController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/ClusterInstanceController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.constant.PermissionConstants; import org.dinky.data.dto.ClusterInstanceDTO; import org.dinky.data.enums.BusinessType; diff --git a/dinky-admin/src/main/java/org/dinky/controller/DataBaseController.java b/dinky-admin/src/main/java/org/dinky/controller/DataBaseController.java index 2914165c91..9b328fd26e 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/DataBaseController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/DataBaseController.java @@ -20,7 +20,7 @@ package org.dinky.controller; import org.dinky.assertion.Asserts; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.constant.CommonConstant; import org.dinky.data.constant.PermissionConstants; import org.dinky.data.dto.DataBaseDTO; diff --git a/dinky-admin/src/main/java/org/dinky/controller/DocumentController.java b/dinky-admin/src/main/java/org/dinky/controller/DocumentController.java index aefefeb2e8..6044472230 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/DocumentController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/DocumentController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.constant.PermissionConstants; import org.dinky.data.enums.BusinessType; import org.dinky.data.enums.Status; diff --git a/dinky-admin/src/main/java/org/dinky/controller/DownloadController.java b/dinky-admin/src/main/java/org/dinky/controller/DownloadController.java index 652b31b872..1e94b040f6 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/DownloadController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/DownloadController.java @@ -20,7 +20,7 @@ package org.dinky.controller; import org.dinky.assertion.Asserts; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.enums.BusinessType; import org.dinky.data.exception.BusException; import org.dinky.data.model.FlinkUdfManifest; diff --git a/dinky-admin/src/main/java/org/dinky/controller/FragmentController.java b/dinky-admin/src/main/java/org/dinky/controller/FragmentController.java index 055da43477..201ac2de6c 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/FragmentController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/FragmentController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.constant.PermissionConstants; import org.dinky.data.enums.BusinessType; import org.dinky.data.enums.Status; diff --git a/dinky-admin/src/main/java/org/dinky/controller/GitController.java b/dinky-admin/src/main/java/org/dinky/controller/GitController.java index bd74ebcc11..a66219ae2d 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/GitController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/GitController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.constant.PermissionConstants; import org.dinky.data.dto.GitProjectDTO; import org.dinky.data.dto.TreeNodeDTO; diff --git 
a/dinky-admin/src/main/java/org/dinky/controller/JobInstanceController.java b/dinky-admin/src/main/java/org/dinky/controller/JobInstanceController.java index 72201b7720..337648ef2d 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/JobInstanceController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/JobInstanceController.java @@ -21,7 +21,7 @@ import org.dinky.api.FlinkAPI; import org.dinky.assertion.Asserts; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.enums.BusinessType; import org.dinky.data.model.ID; import org.dinky.data.model.JobInfoDetail; diff --git a/dinky-admin/src/main/java/org/dinky/controller/LdapController.java b/dinky-admin/src/main/java/org/dinky/controller/LdapController.java index b72ce1d25c..ef2d386295 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/LdapController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/LdapController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.dto.LoginDTO; import org.dinky.data.dto.UserDTO; import org.dinky.data.enums.BusinessType; diff --git a/dinky-admin/src/main/java/org/dinky/controller/MenuController.java b/dinky-admin/src/main/java/org/dinky/controller/MenuController.java index 667cf0e55e..d904b7d95a 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/MenuController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/MenuController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.constant.PermissionConstants; import org.dinky.data.dto.MenuDTO; import org.dinky.data.dto.RoleMenuDTO; diff --git a/dinky-admin/src/main/java/org/dinky/controller/MonitorController.java b/dinky-admin/src/main/java/org/dinky/controller/MonitorController.java index c52d1e3028..a368b7b5f5 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/MonitorController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/MonitorController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.dto.MetricsLayoutDTO; import org.dinky.data.enums.BusinessType; import org.dinky.data.enums.MetricsType; diff --git a/dinky-admin/src/main/java/org/dinky/controller/ResourceController.java b/dinky-admin/src/main/java/org/dinky/controller/ResourceController.java index 5c0ae8f7ec..cf7ea7f7ab 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/ResourceController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/ResourceController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.constant.PermissionConstants; import org.dinky.data.dto.ResourcesDTO; import org.dinky.data.dto.TreeNodeDTO; diff --git a/dinky-admin/src/main/java/org/dinky/controller/RoleController.java b/dinky-admin/src/main/java/org/dinky/controller/RoleController.java index d760cb1328..9dbe28de1e 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/RoleController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/RoleController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.constant.PermissionConstants; import org.dinky.data.dto.RoleDTO; import 
org.dinky.data.enums.BusinessType; diff --git a/dinky-admin/src/main/java/org/dinky/controller/RoleMenuController.java b/dinky-admin/src/main/java/org/dinky/controller/RoleMenuController.java index 79f9235558..8fd172de6a 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/RoleMenuController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/RoleMenuController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.constant.PermissionConstants; import org.dinky.data.dto.AssignMenuToRoleDTO; import org.dinky.data.enums.BusinessType; diff --git a/dinky-admin/src/main/java/org/dinky/controller/RowPermissionsController.java b/dinky-admin/src/main/java/org/dinky/controller/RowPermissionsController.java index 9ad55ca06d..89c6275980 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/RowPermissionsController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/RowPermissionsController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.constant.PermissionConstants; import org.dinky.data.enums.BusinessType; import org.dinky.data.enums.Status; diff --git a/dinky-admin/src/main/java/org/dinky/controller/StudioController.java b/dinky-admin/src/main/java/org/dinky/controller/StudioController.java index 4f5dcd8491..64d94b7d4f 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/StudioController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/StudioController.java @@ -20,7 +20,7 @@ package org.dinky.controller; import org.dinky.assertion.Asserts; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.dto.StudioDDLDTO; import org.dinky.data.dto.StudioLineageDTO; import org.dinky.data.dto.StudioMetaStoreDTO; diff --git a/dinky-admin/src/main/java/org/dinky/controller/SysConfigController.java b/dinky-admin/src/main/java/org/dinky/controller/SysConfigController.java index c966f4250f..6daaa0368e 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/SysConfigController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/SysConfigController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.enums.BusinessType; import org.dinky.data.enums.Status; import org.dinky.data.model.Configuration; diff --git a/dinky-admin/src/main/java/org/dinky/controller/TaskController.java b/dinky-admin/src/main/java/org/dinky/controller/TaskController.java index bc23c4fe79..4a8a0d1c9b 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/TaskController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/TaskController.java @@ -19,8 +19,8 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; import org.dinky.data.annotations.ExecuteProcess; +import org.dinky.data.annotations.Log; import org.dinky.data.annotations.ProcessId; import org.dinky.data.dto.DebugDTO; import org.dinky.data.dto.TaskDTO; diff --git a/dinky-admin/src/main/java/org/dinky/controller/TaskVersionController.java b/dinky-admin/src/main/java/org/dinky/controller/TaskVersionController.java index 5ff5234218..952fea1617 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/TaskVersionController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/TaskVersionController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import 
org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.dto.TaskVersionHistoryDTO; import org.dinky.data.enums.BusinessType; import org.dinky.data.model.TaskVersion; diff --git a/dinky-admin/src/main/java/org/dinky/controller/TenantController.java b/dinky-admin/src/main/java/org/dinky/controller/TenantController.java index b5e6b8a186..db863fa0dd 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/TenantController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/TenantController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.constant.PermissionConstants; import org.dinky.data.enums.BusinessType; import org.dinky.data.model.Tenant; diff --git a/dinky-admin/src/main/java/org/dinky/controller/TokenController.java b/dinky-admin/src/main/java/org/dinky/controller/TokenController.java index 8db9b8247e..93656e4266 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/TokenController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/TokenController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.constant.PermissionConstants; import org.dinky.data.enums.BusinessType; import org.dinky.data.enums.Status; diff --git a/dinky-admin/src/main/java/org/dinky/controller/UDFTemplateController.java b/dinky-admin/src/main/java/org/dinky/controller/UDFTemplateController.java index 49cec7f746..7b7ffacac3 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/UDFTemplateController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/UDFTemplateController.java @@ -19,7 +19,7 @@ package org.dinky.controller; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.constant.PermissionConstants; import org.dinky.data.enums.BusinessType; import org.dinky.data.enums.Status; diff --git a/dinky-admin/src/main/java/org/dinky/controller/UserController.java b/dinky-admin/src/main/java/org/dinky/controller/UserController.java index 6325d6baac..d834f7a9b9 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/UserController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/UserController.java @@ -20,7 +20,7 @@ package org.dinky.controller; import org.dinky.assertion.Asserts; -import org.dinky.data.annotation.Log; +import org.dinky.data.annotations.Log; import org.dinky.data.constant.PermissionConstants; import org.dinky.data.dto.ModifyPasswordDTO; import org.dinky.data.enums.BusinessType; diff --git a/dinky-admin/src/main/java/org/dinky/data/annotation/ConditionalOnListProperty.java b/dinky-admin/src/main/java/org/dinky/data/annotation/ConditionalOnListProperty.java deleted file mode 100644 index 4f0c7dac57..0000000000 --- a/dinky-admin/src/main/java/org/dinky/data/annotation/ConditionalOnListProperty.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package org.dinky.data.annotation; - -import java.lang.annotation.Documented; -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; -import java.util.Arrays; - -import org.springframework.context.annotation.Condition; -import org.springframework.context.annotation.ConditionContext; -import org.springframework.context.annotation.Conditional; -import org.springframework.core.type.AnnotatedTypeMetadata; - -@Target({ElementType.TYPE, ElementType.METHOD}) -@Retention(RetentionPolicy.RUNTIME) -@Documented -@Conditional(CustomListCondition.class) -public @interface ConditionalOnListProperty { - String name(); - - String havingValue(); -} - -class CustomListCondition implements Condition { - - @Override - public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { - String propertyName = (String) metadata.getAnnotationAttributes(ConditionalOnListProperty.class.getName()) - .get("name"); - String requiredValue = (String) metadata.getAnnotationAttributes(ConditionalOnListProperty.class.getName()) - .get("havingValue"); - String[] propertyValues = context.getEnvironment().getProperty(propertyName, String[].class); - if (propertyValues != null) { - return Arrays.asList(propertyValues).contains(requiredValue); - } - return false; - } -} diff --git a/dinky-admin/src/main/java/org/dinky/data/model/AlertRule.java b/dinky-admin/src/main/java/org/dinky/data/model/AlertRule.java index 8013a28ce6..d0cba14a95 100644 --- a/dinky-admin/src/main/java/org/dinky/data/model/AlertRule.java +++ b/dinky-admin/src/main/java/org/dinky/data/model/AlertRule.java @@ -36,7 +36,7 @@ @Data @TableName("dinky_alert_rules") @ApiModel(value = "AlertRule", description = "AlertRule") -public class AlertRule extends SuperEntity { +public class AlertRule extends SuperEntity { @ApiModelProperty(value = "rule", required = true, dataType = "String", example = "rule") String rule; diff --git a/dinky-admin/src/main/java/org/dinky/service/task/BaseTask.java b/dinky-admin/src/main/java/org/dinky/service/task/BaseTask.java index 16e5993a04..270bc6617f 100644 --- a/dinky-admin/src/main/java/org/dinky/service/task/BaseTask.java +++ b/dinky-admin/src/main/java/org/dinky/service/task/BaseTask.java @@ -20,7 +20,7 @@ package org.dinky.service.task; import org.dinky.config.Dialect; -import org.dinky.data.annotation.SupportDialect; +import org.dinky.data.annotations.SupportDialect; import org.dinky.data.dto.TaskDTO; import org.dinky.data.exception.NotSupportExplainExcepition; import org.dinky.data.result.SelectResult; diff --git a/dinky-admin/src/main/java/org/dinky/service/task/CommonSqlTask.java b/dinky-admin/src/main/java/org/dinky/service/task/CommonSqlTask.java index 69d7469221..69432ad549 100644 --- a/dinky-admin/src/main/java/org/dinky/service/task/CommonSqlTask.java +++ b/dinky-admin/src/main/java/org/dinky/service/task/CommonSqlTask.java @@ -20,7 +20,7 @@ package org.dinky.service.task; import org.dinky.config.Dialect; -import org.dinky.data.annotation.SupportDialect; +import 
org.dinky.data.annotations.SupportDialect; import org.dinky.data.dto.SqlDTO; import org.dinky.data.dto.TaskDTO; import org.dinky.data.result.ResultPool; diff --git a/dinky-admin/src/main/java/org/dinky/service/task/FlinkJarSqlTask.java b/dinky-admin/src/main/java/org/dinky/service/task/FlinkJarSqlTask.java index c85da03006..a6dd4c804a 100644 --- a/dinky-admin/src/main/java/org/dinky/service/task/FlinkJarSqlTask.java +++ b/dinky-admin/src/main/java/org/dinky/service/task/FlinkJarSqlTask.java @@ -20,7 +20,7 @@ package org.dinky.service.task; import org.dinky.config.Dialect; -import org.dinky.data.annotation.SupportDialect; +import org.dinky.data.annotations.SupportDialect; import org.dinky.data.dto.TaskDTO; import org.dinky.data.result.SqlExplainResult; import org.dinky.job.JobResult; diff --git a/dinky-admin/src/main/java/org/dinky/service/task/FlinkSqlTask.java b/dinky-admin/src/main/java/org/dinky/service/task/FlinkSqlTask.java index 80739b1f99..29511c86f8 100644 --- a/dinky-admin/src/main/java/org/dinky/service/task/FlinkSqlTask.java +++ b/dinky-admin/src/main/java/org/dinky/service/task/FlinkSqlTask.java @@ -20,7 +20,7 @@ package org.dinky.service.task; import org.dinky.config.Dialect; -import org.dinky.data.annotation.SupportDialect; +import org.dinky.data.annotations.SupportDialect; import org.dinky.data.dto.TaskDTO; import org.dinky.data.result.SqlExplainResult; import org.dinky.job.JobManager; diff --git a/dinky-admin/src/main/java/org/dinky/service/task/UdfTask.java b/dinky-admin/src/main/java/org/dinky/service/task/UdfTask.java index c19bfe8b1c..094670fdc2 100644 --- a/dinky-admin/src/main/java/org/dinky/service/task/UdfTask.java +++ b/dinky-admin/src/main/java/org/dinky/service/task/UdfTask.java @@ -20,7 +20,7 @@ package org.dinky.service.task; import org.dinky.config.Dialect; -import org.dinky.data.annotation.SupportDialect; +import org.dinky.data.annotations.SupportDialect; import org.dinky.data.dto.TaskDTO; import org.dinky.data.model.Task; import org.dinky.function.FunctionFactory; diff --git a/dinky-common/pom.xml b/dinky-common/pom.xml index ac205eec14..79d9414507 100644 --- a/dinky-common/pom.xml +++ b/dinky-common/pom.xml @@ -156,6 +156,33 @@ org.apache.httpcomponents httpclient + + org.eclipse.jgit + org.eclipse.jgit.archive + ${jgit.version} + + + org.slf4j + slf4j-api + + + + + org.eclipse.jgit + org.eclipse.jgit.ssh.jsch + ${jgit.version} + + + org.slf4j + slf4j-api + + + + + + com.github.oshi + oshi-core + diff --git a/dinky-admin/src/main/java/org/dinky/data/annotation/GaugeM.java b/dinky-common/src/main/java/org/dinky/data/annotations/GaugeM.java similarity index 97% rename from dinky-admin/src/main/java/org/dinky/data/annotation/GaugeM.java rename to dinky-common/src/main/java/org/dinky/data/annotations/GaugeM.java index 317a7a1110..9b49fc30af 100644 --- a/dinky-admin/src/main/java/org/dinky/data/annotation/GaugeM.java +++ b/dinky-common/src/main/java/org/dinky/data/annotations/GaugeM.java @@ -17,7 +17,7 @@ * */ -package org.dinky.data.annotation; +package org.dinky.data.annotations; import java.lang.annotation.ElementType; import java.lang.annotation.Inherited; diff --git a/dinky-admin/src/main/java/org/dinky/data/annotation/Log.java b/dinky-common/src/main/java/org/dinky/data/annotations/Log.java similarity index 87% rename from dinky-admin/src/main/java/org/dinky/data/annotation/Log.java rename to dinky-common/src/main/java/org/dinky/data/annotations/Log.java index faec64b42c..4ba7e093a6 100644 --- a/dinky-admin/src/main/java/org/dinky/data/annotation/Log.java 
+++ b/dinky-common/src/main/java/org/dinky/data/annotations/Log.java @@ -17,7 +17,7 @@ * */ -package org.dinky.data.annotation; +package org.dinky.data.annotations; import org.dinky.data.enums.BusinessType; @@ -33,11 +33,11 @@ @Documented public @interface Log { /** 模块 */ - public String title() default ""; + String title() default ""; /** 功能 */ - public BusinessType businessType() default BusinessType.OTHER; + BusinessType businessType() default BusinessType.OTHER; /** 是否保存请求的参数 */ - public boolean isSaveRequestData() default true; + boolean isSaveRequestData() default true; } diff --git a/dinky-admin/src/main/java/org/dinky/data/annotation/SupportDialect.java b/dinky-common/src/main/java/org/dinky/data/annotations/SupportDialect.java similarity index 91% rename from dinky-admin/src/main/java/org/dinky/data/annotation/SupportDialect.java rename to dinky-common/src/main/java/org/dinky/data/annotations/SupportDialect.java index c706b92423..f46878a311 100644 --- a/dinky-admin/src/main/java/org/dinky/data/annotation/SupportDialect.java +++ b/dinky-common/src/main/java/org/dinky/data/annotations/SupportDialect.java @@ -17,7 +17,7 @@ * */ -package org.dinky.data.annotation; +package org.dinky.data.annotations; import org.dinky.config.Dialect; @@ -28,14 +28,14 @@ import java.lang.annotation.Target; /** - * 自定义操作日志记录注解 + * task support dialect */ @Target({ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface SupportDialect { /** - * 模块 + * Support Dialect */ - public Dialect[] value(); + Dialect[] value(); } diff --git a/dinky-admin/src/main/java/org/dinky/data/constant/BaseConstant.java b/dinky-common/src/main/java/org/dinky/data/constant/BaseConstant.java similarity index 100% rename from dinky-admin/src/main/java/org/dinky/data/constant/BaseConstant.java rename to dinky-common/src/main/java/org/dinky/data/constant/BaseConstant.java diff --git a/dinky-admin/src/main/java/org/dinky/data/constant/DirConstant.java b/dinky-common/src/main/java/org/dinky/data/constant/DirConstant.java similarity index 66% rename from dinky-admin/src/main/java/org/dinky/data/constant/DirConstant.java rename to dinky-common/src/main/java/org/dinky/data/constant/DirConstant.java index 8885fee7c1..d71f0cd972 100644 --- a/dinky-admin/src/main/java/org/dinky/data/constant/DirConstant.java +++ b/dinky-common/src/main/java/org/dinky/data/constant/DirConstant.java @@ -19,7 +19,9 @@ package org.dinky.data.constant; -import org.springframework.boot.system.ApplicationHome; +import java.io.File; + +import cn.hutool.core.io.FileUtil; /** * DirConstant @@ -28,14 +30,8 @@ */ public class DirConstant { - public static final String FILE_SEPARATOR = "file.separator"; - public static final String LOG_DIR_PATH; - public static final String ROOT_LOG_PATH; - - static { - String separator = System.getProperty(FILE_SEPARATOR); - String rootPath = new ApplicationHome().getDir().getPath(); - LOG_DIR_PATH = rootPath + separator + "logs"; - ROOT_LOG_PATH = LOG_DIR_PATH + separator + "dinky.log"; - } + public static final String FILE_SEPARATOR = File.separator; + public static final String ROOT_PATH = FileUtil.getUserHomeDir().getAbsolutePath(); + public static final String LOG_DIR_PATH = ROOT_PATH + FILE_SEPARATOR + "logs"; + public static final String ROOT_LOG_PATH = LOG_DIR_PATH + FILE_SEPARATOR + "dinky.log"; } diff --git a/dinky-admin/src/main/java/org/dinky/data/constant/PermissionConstants.java b/dinky-common/src/main/java/org/dinky/data/constant/PermissionConstants.java similarity index 100% rename from 
dinky-admin/src/main/java/org/dinky/data/constant/PermissionConstants.java rename to dinky-common/src/main/java/org/dinky/data/constant/PermissionConstants.java diff --git a/dinky-admin/src/main/java/org/dinky/data/enums/BusinessStatus.java b/dinky-common/src/main/java/org/dinky/data/enums/BusinessStatus.java similarity index 100% rename from dinky-admin/src/main/java/org/dinky/data/enums/BusinessStatus.java rename to dinky-common/src/main/java/org/dinky/data/enums/BusinessStatus.java diff --git a/dinky-admin/src/main/java/org/dinky/data/enums/BusinessType.java b/dinky-common/src/main/java/org/dinky/data/enums/BusinessType.java similarity index 100% rename from dinky-admin/src/main/java/org/dinky/data/enums/BusinessType.java rename to dinky-common/src/main/java/org/dinky/data/enums/BusinessType.java diff --git a/dinky-admin/src/main/java/org/dinky/data/enums/CodeEnum.java b/dinky-common/src/main/java/org/dinky/data/enums/CodeEnum.java similarity index 100% rename from dinky-admin/src/main/java/org/dinky/data/enums/CodeEnum.java rename to dinky-common/src/main/java/org/dinky/data/enums/CodeEnum.java diff --git a/dinky-admin/src/main/java/org/dinky/data/enums/MenuType.java b/dinky-common/src/main/java/org/dinky/data/enums/MenuType.java similarity index 100% rename from dinky-admin/src/main/java/org/dinky/data/enums/MenuType.java rename to dinky-common/src/main/java/org/dinky/data/enums/MenuType.java diff --git a/dinky-admin/src/main/java/org/dinky/data/enums/MetricsType.java b/dinky-common/src/main/java/org/dinky/data/enums/MetricsType.java similarity index 100% rename from dinky-admin/src/main/java/org/dinky/data/enums/MetricsType.java rename to dinky-common/src/main/java/org/dinky/data/enums/MetricsType.java diff --git a/dinky-admin/src/main/java/org/dinky/data/enums/UserType.java b/dinky-common/src/main/java/org/dinky/data/enums/UserType.java similarity index 100% rename from dinky-admin/src/main/java/org/dinky/data/enums/UserType.java rename to dinky-common/src/main/java/org/dinky/data/enums/UserType.java diff --git a/dinky-admin/src/main/java/org/dinky/data/exception/AuthException.java b/dinky-common/src/main/java/org/dinky/data/exception/AuthException.java similarity index 100% rename from dinky-admin/src/main/java/org/dinky/data/exception/AuthException.java rename to dinky-common/src/main/java/org/dinky/data/exception/AuthException.java diff --git a/dinky-admin/src/main/java/org/dinky/data/exception/BusException.java b/dinky-common/src/main/java/org/dinky/data/exception/BusException.java similarity index 100% rename from dinky-admin/src/main/java/org/dinky/data/exception/BusException.java rename to dinky-common/src/main/java/org/dinky/data/exception/BusException.java diff --git a/dinky-admin/src/main/java/org/dinky/data/exception/NotSupportExplainExcepition.java b/dinky-common/src/main/java/org/dinky/data/exception/NotSupportExplainExcepition.java similarity index 100% rename from dinky-admin/src/main/java/org/dinky/data/exception/NotSupportExplainExcepition.java rename to dinky-common/src/main/java/org/dinky/data/exception/NotSupportExplainExcepition.java diff --git a/dinky-admin/src/main/java/org/dinky/data/exception/SqlExplainExcepition.java b/dinky-common/src/main/java/org/dinky/data/exception/SqlExplainExcepition.java similarity index 100% rename from dinky-admin/src/main/java/org/dinky/data/exception/SqlExplainExcepition.java rename to dinky-common/src/main/java/org/dinky/data/exception/SqlExplainExcepition.java diff --git 
a/dinky-admin/src/main/java/org/dinky/data/exception/TaskNotDoneException.java b/dinky-common/src/main/java/org/dinky/data/exception/TaskNotDoneException.java similarity index 100% rename from dinky-admin/src/main/java/org/dinky/data/exception/TaskNotDoneException.java rename to dinky-common/src/main/java/org/dinky/data/exception/TaskNotDoneException.java diff --git a/dinky-admin/src/main/java/org/dinky/data/metrics/BaseMetrics.java b/dinky-common/src/main/java/org/dinky/data/metrics/BaseMetrics.java similarity index 100% rename from dinky-admin/src/main/java/org/dinky/data/metrics/BaseMetrics.java rename to dinky-common/src/main/java/org/dinky/data/metrics/BaseMetrics.java diff --git a/dinky-admin/src/main/java/org/dinky/data/metrics/Cpu.java b/dinky-common/src/main/java/org/dinky/data/metrics/Cpu.java similarity index 97% rename from dinky-admin/src/main/java/org/dinky/data/metrics/Cpu.java rename to dinky-common/src/main/java/org/dinky/data/metrics/Cpu.java index 0f149eb00d..ad991329d7 100644 --- a/dinky-admin/src/main/java/org/dinky/data/metrics/Cpu.java +++ b/dinky-common/src/main/java/org/dinky/data/metrics/Cpu.java @@ -19,7 +19,7 @@ package org.dinky.data.metrics; -import org.dinky.data.annotation.GaugeM; +import org.dinky.data.annotations.GaugeM; import cn.hutool.core.bean.BeanUtil; import cn.hutool.system.oshi.CpuInfo; diff --git a/dinky-admin/src/main/java/org/dinky/data/metrics/Jvm.java b/dinky-common/src/main/java/org/dinky/data/metrics/Jvm.java similarity index 98% rename from dinky-admin/src/main/java/org/dinky/data/metrics/Jvm.java rename to dinky-common/src/main/java/org/dinky/data/metrics/Jvm.java index 9da3f42170..3a4b4c97a4 100644 --- a/dinky-admin/src/main/java/org/dinky/data/metrics/Jvm.java +++ b/dinky-common/src/main/java/org/dinky/data/metrics/Jvm.java @@ -19,7 +19,7 @@ package org.dinky.data.metrics; -import org.dinky.data.annotation.GaugeM; +import org.dinky.data.annotations.GaugeM; import java.lang.management.ManagementFactory; import java.lang.management.MemoryUsage; diff --git a/dinky-admin/src/main/java/org/dinky/data/metrics/Mem.java b/dinky-common/src/main/java/org/dinky/data/metrics/Mem.java similarity index 97% rename from dinky-admin/src/main/java/org/dinky/data/metrics/Mem.java rename to dinky-common/src/main/java/org/dinky/data/metrics/Mem.java index 74994da881..e07e1aa4aa 100644 --- a/dinky-admin/src/main/java/org/dinky/data/metrics/Mem.java +++ b/dinky-common/src/main/java/org/dinky/data/metrics/Mem.java @@ -19,7 +19,7 @@ package org.dinky.data.metrics; -import org.dinky.data.annotation.GaugeM; +import org.dinky.data.annotations.GaugeM; import cn.hutool.system.oshi.OshiUtil; import lombok.AllArgsConstructor; diff --git a/dinky-admin/src/main/java/org/dinky/data/metrics/MetricsTotal.java b/dinky-common/src/main/java/org/dinky/data/metrics/MetricsTotal.java similarity index 100% rename from dinky-admin/src/main/java/org/dinky/data/metrics/MetricsTotal.java rename to dinky-common/src/main/java/org/dinky/data/metrics/MetricsTotal.java diff --git a/pom.xml b/pom.xml index f3ab3bfc66..85d63e24ef 100644 --- a/pom.xml +++ b/pom.xml @@ -77,6 +77,7 @@ 1.6.2 2.3.0 2.9.0 + 5.13.1.202206130422-r 5.9.1 4.1.0 2.19.0 From 4cad347bb0bf630cf500b8bb5d28a7831095da62 Mon Sep 17 00:00:00 2001 From: ZackYoung Date: Wed, 8 Nov 2023 20:19:51 +0800 Subject: [PATCH 19/21] [Refactor][Flink] Refactor flink sql parse (#2509) * spotless * spotless * spotless * spotless * spotless * spotless * spotless --- .../executor/CustomTableEnvironmentImpl.java | 31 +- 
.../executor/CustomTableEnvironmentImpl.java | 23 +- .../executor/CustomTableEnvironmentImpl.java | 43 ++- .../executor/CustomTableEnvironmentImpl.java | 48 ++- .../KubernetesClusterDescriptor.java | 286 ++++++++++++++++++ .../AbstractCustomTableEnvironment.java | 10 +- .../executor/CustomTableEnvironmentImpl.java | 54 +++- .../ExtendedOperationExecutorWrapper.java | 51 ---- .../executor/CustomTableEnvironment.java | 5 +- .../org/dinky/parser/BaseSingleSqlParser.java | 0 .../org/dinky/parser/CustomParserImpl.java | 12 +- .../org/dinky/parser/DeleteSqlParser.java | 0 .../dinky/parser/InsertSelectSqlParser.java | 0 .../org/dinky/parser/InsertSqlParser.java | 0 .../org/dinky/parser/SelectSqlParser.java | 0 .../java/org/dinky/parser/SqlSegment.java | 0 .../main/java/org/dinky/parser/SqlType.java | 0 .../org/dinky/parser/UpdateSqlParser.java | 0 .../org/dinky/trans/AbstractOperation.java | 0 .../java/org/dinky/trans/ExtendOperation.java | 17 +- .../org/dinky/trans/ddl/AddJarOperation.java | 33 +- .../trans/ddl/CreateAggTableOperation.java | 172 +++++++++++ .../CreateTemporalTableFunctionOperation.java | 24 +- .../dinky/trans/ddl/PrintTableOperation.java | 19 +- .../org/dinky/trans/ddl/SetOperation.java | 44 ++- .../dinky/trans/dml/ExecuteJarOperation.java | 21 +- .../trans/parse/AddJarSqlParseStrategy.java | 33 +- .../CreateAggTableSelectSqlParseStrategy.java | 40 ++- ...ateTemporalTableFunctionParseStrategy.java | 7 +- .../trans/parse}/ExecuteJarParseStrategy.java | 2 +- .../trans/parse/SetSqlParseStrategy.java | 67 ++++ .../java/org/dinky/utils/SqlSegmentUtil.java | 40 +++ .../java/org/dinky/explainer/Explainer.java | 4 +- .../main/java/org/dinky/job/JobManager.java | 8 +- .../CustomExtendedOperationExecutorImpl.java | 8 +- .../java/org/dinky/executor/Executor.java | 5 +- .../dinky/parser/SingleSqlParserFactory.java | 6 +- .../java/org/dinky/trans/ddl/AggTable.java | 132 -------- .../trans/ddl/CreateAggTableOperation.java | 74 ----- .../trans/ddl/CreateCDCSourceOperation.java | 5 +- .../{ddl => show}/ShowFragmentOperation.java | 2 +- .../{ddl => show}/ShowFragmentsOperation.java | 2 +- .../dinky/{parser => parse}/SqlTypeTest.java | 4 +- ...emporalTableFunctionParseStrategyTest.java | 2 + dinky-flink/dinky-flink-1.18/pom.xml | 4 +- 45 files changed, 892 insertions(+), 446 deletions(-) create mode 100644 dinky-client/dinky-client-1.18/src/main/java/org/apache/flink/kubernetes/KubernetesClusterDescriptor.java delete mode 100644 dinky-client/dinky-client-1.18/src/main/java/org/dinky/executor/ExtendedOperationExecutorWrapper.java rename {dinky-executor => dinky-client/dinky-client-base}/src/main/java/org/dinky/parser/BaseSingleSqlParser.java (100%) rename {dinky-executor => dinky-client/dinky-client-base}/src/main/java/org/dinky/parser/CustomParserImpl.java (89%) rename {dinky-executor => dinky-client/dinky-client-base}/src/main/java/org/dinky/parser/DeleteSqlParser.java (100%) rename {dinky-executor => dinky-client/dinky-client-base}/src/main/java/org/dinky/parser/InsertSelectSqlParser.java (100%) rename {dinky-executor => dinky-client/dinky-client-base}/src/main/java/org/dinky/parser/InsertSqlParser.java (100%) rename {dinky-executor => dinky-client/dinky-client-base}/src/main/java/org/dinky/parser/SelectSqlParser.java (100%) rename {dinky-executor => dinky-client/dinky-client-base}/src/main/java/org/dinky/parser/SqlSegment.java (100%) rename {dinky-executor => dinky-client/dinky-client-base}/src/main/java/org/dinky/parser/SqlType.java (100%) rename {dinky-executor => 
dinky-client/dinky-client-base}/src/main/java/org/dinky/parser/UpdateSqlParser.java (100%) rename {dinky-executor => dinky-client/dinky-client-base}/src/main/java/org/dinky/trans/AbstractOperation.java (100%) rename {dinky-executor => dinky-client/dinky-client-base}/src/main/java/org/dinky/trans/ExtendOperation.java (59%) rename {dinky-executor => dinky-client/dinky-client-base}/src/main/java/org/dinky/trans/ddl/AddJarOperation.java (69%) create mode 100644 dinky-client/dinky-client-base/src/main/java/org/dinky/trans/ddl/CreateAggTableOperation.java rename {dinky-executor => dinky-client/dinky-client-base}/src/main/java/org/dinky/trans/ddl/CreateTemporalTableFunctionOperation.java (80%) rename {dinky-executor => dinky-client/dinky-client-base}/src/main/java/org/dinky/trans/ddl/PrintTableOperation.java (76%) rename {dinky-executor => dinky-client/dinky-client-base}/src/main/java/org/dinky/trans/ddl/SetOperation.java (64%) rename {dinky-executor => dinky-client/dinky-client-base}/src/main/java/org/dinky/trans/dml/ExecuteJarOperation.java (85%) rename dinky-executor/src/main/java/org/dinky/parser/check/AddJarSqlParser.java => dinky-client/dinky-client-base/src/main/java/org/dinky/trans/parse/AddJarSqlParseStrategy.java (74%) rename dinky-executor/src/main/java/org/dinky/parser/CreateAggTableSelectSqlParser.java => dinky-client/dinky-client-base/src/main/java/org/dinky/trans/parse/CreateAggTableSelectSqlParseStrategy.java (55%) rename {dinky-executor/src/main/java/org/dinky/trans => dinky-client/dinky-client-base/src/main/java/org/dinky/trans/parse}/CreateTemporalTableFunctionParseStrategy.java (94%) rename {dinky-executor/src/main/java/org/dinky/trans => dinky-client/dinky-client-base/src/main/java/org/dinky/trans/parse}/ExecuteJarParseStrategy.java (98%) create mode 100644 dinky-client/dinky-client-base/src/main/java/org/dinky/trans/parse/SetSqlParseStrategy.java create mode 100644 dinky-client/dinky-client-base/src/main/java/org/dinky/utils/SqlSegmentUtil.java delete mode 100644 dinky-executor/src/main/java/org/dinky/trans/ddl/AggTable.java delete mode 100644 dinky-executor/src/main/java/org/dinky/trans/ddl/CreateAggTableOperation.java rename dinky-executor/src/main/java/org/dinky/trans/{ddl => show}/ShowFragmentOperation.java (98%) rename dinky-executor/src/main/java/org/dinky/trans/{ddl => show}/ShowFragmentsOperation.java (98%) rename dinky-executor/src/test/java/org/dinky/{parser => parse}/SqlTypeTest.java (97%) diff --git a/dinky-client/dinky-client-1.14/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java b/dinky-client/dinky-client-1.14/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java index 2c121d8425..c83347c9c8 100644 --- a/dinky-client/dinky-client-1.14/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java +++ b/dinky-client/dinky-client-1.14/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java @@ -70,13 +70,16 @@ import org.apache.flink.table.typeutils.FieldInfoUtils; import org.apache.flink.types.Row; +import java.io.File; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.stream.Collectors; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; @@ -124,13 +127,7 @@ public static CustomTableEnvironmentImpl createBatch(StreamExecutionEnvironment 
configuration.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH); TableConfig tableConfig = new TableConfig(); tableConfig.addConfiguration(configuration); - return create( - executionEnvironment, - EnvironmentSettings.newInstance() - .useBlinkPlanner() - .inBatchMode() - .build(), - tableConfig); + return create(executionEnvironment, EnvironmentSettings.inBatchMode(), tableConfig); } public static CustomTableEnvironmentImpl create( @@ -295,15 +292,14 @@ public SqlExplainResult explainSqlRecord(String statement, ExplainDetail... extr return record; } - public boolean parseAndLoadConfiguration( - String statement, StreamExecutionEnvironment environment, Map setMap) { + public boolean parseAndLoadConfiguration(String statement, Map setMap) { List operations = getParser().parse(statement); for (Operation operation : operations) { if (operation instanceof SetOperation) { - callSet((SetOperation) operation, environment, setMap); + callSet((SetOperation) operation, getStreamExecutionEnvironment(), setMap); return true; } else if (operation instanceof ResetOperation) { - callReset((ResetOperation) operation, environment, setMap); + callReset((ResetOperation) operation, getStreamExecutionEnvironment(), setMap); return true; } } @@ -378,6 +374,19 @@ public void executeCTAS(Operation operation) { } } + @Override + public void addJar(File... jarPath) { + Configuration configuration = this.getRootConfiguration(); + List jars = configuration.get(PipelineOptions.JARS); + if (jars == null) { + configuration.set( + PipelineOptions.JARS, + Arrays.stream(jarPath).map(File::getAbsolutePath).collect(Collectors.toList())); + } else { + CollUtil.addAll(jars, jarPath); + } + } + @Override public void createTemporaryView(String path, DataStream dataStream, Expression... fields) { createTemporaryView(path, fromDataStream(dataStream, fields)); diff --git a/dinky-client/dinky-client-1.15/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java b/dinky-client/dinky-client-1.15/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java index b02ea33f0d..037b8d1aab 100644 --- a/dinky-client/dinky-client-1.15/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java +++ b/dinky-client/dinky-client-1.15/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java @@ -65,10 +65,13 @@ import org.apache.flink.table.operations.ddl.CreateTableOperation; import org.apache.flink.types.Row; +import java.io.File; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; @@ -192,6 +195,19 @@ public ObjectNode getStreamGraph(String statement) { } } + @Override + public void addJar(File... jarPath) { + Configuration configuration = this.getRootConfiguration(); + List jars = configuration.get(PipelineOptions.JARS); + if (jars == null) { + configuration.set( + PipelineOptions.JARS, + Arrays.stream(jarPath).map(File::getAbsolutePath).collect(Collectors.toList())); + } else { + CollUtil.addAll(jars, jarPath); + } + } + @Override public JobPlanInfo getJobPlanInfo(List statements) { return new JobPlanInfo(JsonPlanGenerator.generatePlan(getJobGraphFromInserts(statements))); @@ -254,15 +270,14 @@ public SqlExplainResult explainSqlRecord(String statement, ExplainDetail... 
extr return record; } - public boolean parseAndLoadConfiguration( - String statement, StreamExecutionEnvironment environment, Map setMap) { + public boolean parseAndLoadConfiguration(String statement, Map setMap) { List operations = getParser().parse(statement); for (Operation operation : operations) { if (operation instanceof SetOperation) { - callSet((SetOperation) operation, environment, setMap); + callSet((SetOperation) operation, getStreamExecutionEnvironment(), setMap); return true; } else if (operation instanceof ResetOperation) { - callReset((ResetOperation) operation, environment, setMap); + callReset((ResetOperation) operation, getStreamExecutionEnvironment(), setMap); return true; } } diff --git a/dinky-client/dinky-client-1.16/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java b/dinky-client/dinky-client-1.16/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java index bffc299814..54f63dcfce 100644 --- a/dinky-client/dinky-client-1.16/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java +++ b/dinky-client/dinky-client-1.16/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java @@ -50,7 +50,9 @@ import org.apache.flink.table.operations.ddl.CreateTableOperation; import org.apache.flink.types.Row; +import java.io.File; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; @@ -102,6 +104,33 @@ public static CustomTableEnvironmentImpl create( return new CustomTableEnvironmentImpl(streamTableEnvironment); } + public boolean parseAndLoadConfiguration(String statement, Map setMap) { + List operations = getParser().parse(statement); + for (Operation operation : operations) { + if (operation instanceof SetOperation) { + callSet((SetOperation) operation, getStreamExecutionEnvironment(), setMap); + return true; + } else if (operation instanceof ResetOperation) { + callReset((ResetOperation) operation, getStreamExecutionEnvironment(), setMap); + return true; + } + } + return false; + } + + @Override + public void addJar(File... jarPath) { + Configuration configuration = this.getRootConfiguration(); + List jars = configuration.get(PipelineOptions.JARS); + if (jars == null) { + configuration.set( + PipelineOptions.JARS, + Arrays.stream(jarPath).map(File::getAbsolutePath).collect(Collectors.toList())); + } else { + CollUtil.addAll(jars, jarPath); + } + } + public ObjectNode getStreamGraph(String statement) { List operations = super.getParser().parse(statement); if (operations.size() != 1) { @@ -198,20 +227,6 @@ public SqlExplainResult explainSqlRecord(String statement, ExplainDetail... 
extr return record; } - public boolean parseAndLoadConfiguration( - String statement, StreamExecutionEnvironment environment, Map setMap) { - for (Operation operation : getParser().parse(statement)) { - if (operation instanceof SetOperation) { - callSet((SetOperation) operation, environment, setMap); - return true; - } else if (operation instanceof ResetOperation) { - callReset((ResetOperation) operation, environment, setMap); - return true; - } - } - return false; - } - private void callSet( SetOperation setOperation, StreamExecutionEnvironment environment, Map setMap) { if (!setOperation.getKey().isPresent() || !setOperation.getValue().isPresent()) { diff --git a/dinky-client/dinky-client-1.17/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java b/dinky-client/dinky-client-1.17/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java index 80723a5a00..4c4994aed8 100644 --- a/dinky-client/dinky-client-1.17/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java +++ b/dinky-client/dinky-client-1.17/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java @@ -51,7 +51,9 @@ import org.apache.flink.table.operations.ddl.CreateTableOperation; import org.apache.flink.types.Row; +import java.io.File; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; @@ -103,6 +105,35 @@ public static CustomTableEnvironmentImpl create( return new CustomTableEnvironmentImpl(streamTableEnvironment); } + @Override + public void addJar(File... jarPath) { + Configuration configuration = this.getRootConfiguration(); + List jars = configuration.get(PipelineOptions.JARS); + if (jars == null) { + configuration.set( + PipelineOptions.JARS, + Arrays.stream(jarPath).map(File::getAbsolutePath).collect(Collectors.toList())); + } else { + CollUtil.addAll(jars, jarPath); + } + } + + @Override + public boolean parseAndLoadConfiguration(String statement, Map setMap) { + List operations = getParser().parse(statement); + for (Operation operation : operations) { + if (operation instanceof SetOperation) { + callSet((SetOperation) operation, getStreamExecutionEnvironment(), setMap); + return true; + } else if (operation instanceof ResetOperation) { + callReset((ResetOperation) operation, getStreamExecutionEnvironment(), setMap); + return true; + } + } + return false; + } + + @Override public ObjectNode getStreamGraph(String statement) { List operations = super.getParser().parse(statement); if (operations.size() != 1) { @@ -142,6 +173,7 @@ public JobPlanInfo getJobPlanInfo(List statements) { return new JobPlanInfo(JsonPlanGenerator.generatePlan(getJobGraphFromInserts(statements))); } + @Override public StreamGraph getStreamGraphFromInserts(List statements) { List modifyOperations = new ArrayList<>(); statements.stream().map(statement -> getParser().parse(statement)).forEach(operations -> { @@ -161,10 +193,12 @@ public StreamGraph getStreamGraphFromInserts(List statements) { return transOperatoinsToStreamGraph(modifyOperations); } + @Override public JobGraph getJobGraphFromInserts(List statements) { return getStreamGraphFromInserts(statements).getJobGraph(); } + @Override public SqlExplainResult explainSqlRecord(String statement, ExplainDetail... extraDetails) { List operations = getParser().parse(statement); if (operations.size() != 1) { @@ -194,20 +228,6 @@ public SqlExplainResult explainSqlRecord(String statement, ExplainDetail... 
extr return data; } - public boolean parseAndLoadConfiguration( - String statement, StreamExecutionEnvironment environment, Map setMap) { - for (Operation operation : getParser().parse(statement)) { - if (operation instanceof SetOperation) { - callSet((SetOperation) operation, environment, setMap); - return true; - } else if (operation instanceof ResetOperation) { - callReset((ResetOperation) operation, environment, setMap); - return true; - } - } - return false; - } - private void callSet( SetOperation setOperation, StreamExecutionEnvironment environment, Map setMap) { if (!setOperation.getKey().isPresent() || !setOperation.getValue().isPresent()) { diff --git a/dinky-client/dinky-client-1.18/src/main/java/org/apache/flink/kubernetes/KubernetesClusterDescriptor.java b/dinky-client/dinky-client-1.18/src/main/java/org/apache/flink/kubernetes/KubernetesClusterDescriptor.java new file mode 100644 index 0000000000..41b782176a --- /dev/null +++ b/dinky-client/dinky-client-1.18/src/main/java/org/apache/flink/kubernetes/KubernetesClusterDescriptor.java @@ -0,0 +1,286 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.apache.flink.kubernetes; + +import static org.apache.flink.util.Preconditions.checkNotNull; + +import org.apache.flink.client.deployment.ClusterDeploymentException; +import org.apache.flink.client.deployment.ClusterDescriptor; +import org.apache.flink.client.deployment.ClusterRetrieveException; +import org.apache.flink.client.deployment.ClusterSpecification; +import org.apache.flink.client.deployment.application.ApplicationConfiguration; +import org.apache.flink.client.program.ClusterClient; +import org.apache.flink.client.program.ClusterClientProvider; +import org.apache.flink.client.program.PackagedProgramUtils; +import org.apache.flink.client.program.rest.RestClusterClient; +import org.apache.flink.configuration.BlobServerOptions; +import org.apache.flink.configuration.Configuration; +import org.apache.flink.configuration.HighAvailabilityOptions; +import org.apache.flink.configuration.JobManagerOptions; +import org.apache.flink.configuration.RestOptions; +import org.apache.flink.configuration.TaskManagerOptions; +import org.apache.flink.kubernetes.configuration.KubernetesConfigOptions; +import org.apache.flink.kubernetes.configuration.KubernetesConfigOptionsInternal; +import org.apache.flink.kubernetes.configuration.KubernetesDeploymentTarget; +import org.apache.flink.kubernetes.entrypoint.KubernetesApplicationClusterEntrypoint; +import org.apache.flink.kubernetes.entrypoint.KubernetesSessionClusterEntrypoint; +import org.apache.flink.kubernetes.kubeclient.Endpoint; +import org.apache.flink.kubernetes.kubeclient.FlinkKubeClient; +import org.apache.flink.kubernetes.kubeclient.FlinkPod; +import org.apache.flink.kubernetes.kubeclient.KubernetesJobManagerSpecification; +import org.apache.flink.kubernetes.kubeclient.decorators.ExternalServiceDecorator; +import org.apache.flink.kubernetes.kubeclient.factory.KubernetesJobManagerFactory; +import org.apache.flink.kubernetes.kubeclient.parameters.KubernetesJobManagerParameters; +import org.apache.flink.kubernetes.utils.Constants; +import org.apache.flink.kubernetes.utils.KubernetesUtils; +import org.apache.flink.runtime.entrypoint.ClusterEntrypoint; +import org.apache.flink.runtime.highavailability.HighAvailabilityServicesUtils; +import org.apache.flink.runtime.highavailability.nonha.standalone.StandaloneClientHAServices; +import org.apache.flink.runtime.jobgraph.JobGraph; +import org.apache.flink.runtime.jobmanager.HighAvailabilityMode; +import org.apache.flink.runtime.rpc.AddressResolution; +import org.apache.flink.util.FlinkException; +import org.apache.flink.util.Preconditions; + +import java.io.File; +import java.util.List; +import java.util.Optional; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Kubernetes specific {@link ClusterDescriptor} implementation. 
*/ +public class KubernetesClusterDescriptor implements ClusterDescriptor { + + private static final Logger LOG = LoggerFactory.getLogger(KubernetesClusterDescriptor.class); + + private static final String CLUSTER_DESCRIPTION = "Kubernetes cluster"; + + private final Configuration flinkConfig; + + private final FlinkKubeClient client; + + private final String clusterId; + + public KubernetesClusterDescriptor(Configuration flinkConfig, FlinkKubeClient client) { + this.flinkConfig = flinkConfig; + this.client = client; + this.clusterId = + checkNotNull(flinkConfig.getString(KubernetesConfigOptions.CLUSTER_ID), "ClusterId must be specified!"); + } + + @Override + public String getClusterDescription() { + return CLUSTER_DESCRIPTION; + } + + private ClusterClientProvider createClusterClientProvider(String clusterId) { + return () -> { + final Configuration configuration = new Configuration(flinkConfig); + + final Optional restEndpoint; + restEndpoint = client.getRestEndpoint(clusterId); + + if (restEndpoint.isPresent()) { + configuration.setString(RestOptions.ADDRESS, restEndpoint.get().getAddress()); + configuration.setInteger(RestOptions.PORT, restEndpoint.get().getPort()); + } else { + throw new RuntimeException( + new ClusterRetrieveException("Could not get the rest endpoint of " + clusterId)); + } + + try { + // Flink client will always use Kubernetes service to contact with jobmanager. So we + // have a pre-configured web monitor address. Using StandaloneClientHAServices to + // create RestClusterClient is reasonable. + return new RestClusterClient<>( + configuration, + clusterId, + (effectiveConfiguration, fatalErrorHandler) -> + new StandaloneClientHAServices(getWebMonitorAddress(effectiveConfiguration))); + } catch (Exception e) { + throw new RuntimeException(new ClusterRetrieveException("Could not create the RestClusterClient.", e)); + } + }; + } + + private String getWebMonitorAddress(Configuration configuration) throws Exception { + AddressResolution resolution = AddressResolution.TRY_ADDRESS_RESOLUTION; + final KubernetesConfigOptions.ServiceExposedType serviceType = + configuration.get(KubernetesConfigOptions.REST_SERVICE_EXPOSED_TYPE); + if (serviceType.isClusterIP()) { + resolution = AddressResolution.NO_ADDRESS_RESOLUTION; + LOG.warn( + "Please note that Flink client operations(e.g. cancel, list, stop," + + " savepoint, etc.) 
won't work from outside the Kubernetes cluster" + + " since '{}' has been set to {}.", + KubernetesConfigOptions.REST_SERVICE_EXPOSED_TYPE.key(), + serviceType); + } + return HighAvailabilityServicesUtils.getWebMonitorAddress(configuration, resolution); + } + + @Override + public ClusterClientProvider retrieve(String clusterId) { + final ClusterClientProvider clusterClientProvider = createClusterClientProvider(clusterId); + + try (ClusterClient clusterClient = clusterClientProvider.getClusterClient()) { + LOG.info( + "Retrieve flink cluster {} successfully, JobManager Web Interface: {}", + clusterId, + clusterClient.getWebInterfaceURL()); + } + return clusterClientProvider; + } + + @Override + public ClusterClientProvider deploySessionCluster(ClusterSpecification clusterSpecification) + throws ClusterDeploymentException { + final ClusterClientProvider clusterClientProvider = + deployClusterInternal(KubernetesSessionClusterEntrypoint.class.getName(), clusterSpecification, false); + + try (ClusterClient clusterClient = clusterClientProvider.getClusterClient()) { + LOG.info( + "Create flink session cluster {} successfully, JobManager Web Interface: {}", + clusterId, + clusterClient.getWebInterfaceURL()); + } + return clusterClientProvider; + } + + @Override + public ClusterClientProvider deployApplicationCluster( + final ClusterSpecification clusterSpecification, final ApplicationConfiguration applicationConfiguration) + throws ClusterDeploymentException { + if (client.getService(ExternalServiceDecorator.getExternalServiceName(clusterId)) + .isPresent()) { + throw new ClusterDeploymentException("The Flink cluster " + clusterId + " already exists."); + } + + checkNotNull(clusterSpecification); + checkNotNull(applicationConfiguration); + + final KubernetesDeploymentTarget deploymentTarget = KubernetesDeploymentTarget.fromConfig(flinkConfig); + if (KubernetesDeploymentTarget.APPLICATION != deploymentTarget) { + throw new ClusterDeploymentException("Couldn't deploy Kubernetes Application Cluster." + + " Expected deployment.target=" + + KubernetesDeploymentTarget.APPLICATION.getName() + + " but actual one was \"" + + deploymentTarget + + "\""); + } + + applicationConfiguration.applyToConfiguration(flinkConfig); + + // No need to do pipelineJars validation if it is a PyFlink job. 
+ if (!(PackagedProgramUtils.isPython(applicationConfiguration.getApplicationClassName()) + || PackagedProgramUtils.isPython(applicationConfiguration.getProgramArguments()))) { + final List pipelineJars = KubernetesUtils.checkJarFileForApplicationMode(flinkConfig); + Preconditions.checkArgument(pipelineJars.size() == 1, "Should only have one jar"); + } + + final ClusterClientProvider clusterClientProvider = deployClusterInternal( + KubernetesApplicationClusterEntrypoint.class.getName(), clusterSpecification, false); + + try (ClusterClient clusterClient = clusterClientProvider.getClusterClient()) { + LOG.info( + "Create flink application cluster {} successfully, JobManager Web Interface: {}", + clusterId, + clusterClient.getWebInterfaceURL()); + } + return clusterClientProvider; + } + + @Override + public ClusterClientProvider deployJobCluster( + ClusterSpecification clusterSpecification, JobGraph jobGraph, boolean detached) + throws ClusterDeploymentException { + throw new ClusterDeploymentException("Per-Job Mode not supported by Active Kubernetes deployments."); + } + + private ClusterClientProvider deployClusterInternal( + String entryPoint, ClusterSpecification clusterSpecification, boolean detached) + throws ClusterDeploymentException { + final ClusterEntrypoint.ExecutionMode executionMode = + detached ? ClusterEntrypoint.ExecutionMode.DETACHED : ClusterEntrypoint.ExecutionMode.NORMAL; + flinkConfig.setString(ClusterEntrypoint.INTERNAL_CLUSTER_EXECUTION_MODE, executionMode.toString()); + + flinkConfig.setString(KubernetesConfigOptionsInternal.ENTRY_POINT_CLASS, entryPoint); + + // Rpc, blob, rest, taskManagerRpc ports need to be exposed, so update them to fixed values. + KubernetesUtils.checkAndUpdatePortConfigOption(flinkConfig, BlobServerOptions.PORT, Constants.BLOB_SERVER_PORT); + KubernetesUtils.checkAndUpdatePortConfigOption( + flinkConfig, TaskManagerOptions.RPC_PORT, Constants.TASK_MANAGER_RPC_PORT); + KubernetesUtils.checkAndUpdatePortConfigOption(flinkConfig, RestOptions.BIND_PORT, Constants.REST_PORT); + + if (HighAvailabilityMode.isHighAvailabilityModeActivated(flinkConfig)) { + flinkConfig.setString(HighAvailabilityOptions.HA_CLUSTER_ID, clusterId); + KubernetesUtils.checkAndUpdatePortConfigOption( + flinkConfig, + HighAvailabilityOptions.HA_JOB_MANAGER_PORT_RANGE, + flinkConfig.get(JobManagerOptions.PORT)); + } + + try { + final KubernetesJobManagerParameters kubernetesJobManagerParameters = + new KubernetesJobManagerParameters(flinkConfig, clusterSpecification); + + final FlinkPod podTemplate = kubernetesJobManagerParameters + .getPodTemplateFilePath() + .map(file -> KubernetesUtils.loadPodFromTemplateFile(client, file, Constants.MAIN_CONTAINER_NAME)) + .orElse(new FlinkPod.Builder().build()); + final KubernetesJobManagerSpecification kubernetesJobManagerSpec = + KubernetesJobManagerFactory.buildKubernetesJobManagerSpecification( + podTemplate, kubernetesJobManagerParameters); + + client.createJobManagerComponent(kubernetesJobManagerSpec); + + return createClusterClientProvider(clusterId); + } catch (Exception e) { + try { + LOG.warn( + "Failed to create the Kubernetes cluster \"{}\", try to clean up the residual resources.", + clusterId); + client.stopAndCleanupCluster(clusterId); + } catch (Exception e1) { + LOG.info("Failed to stop and clean up the Kubernetes cluster \"{}\".", clusterId, e1); + } + throw new ClusterDeploymentException("Could not create Kubernetes cluster \"" + clusterId + "\".", e); + } + } + + @Override + public void killCluster(String clusterId) 
throws FlinkException { + try { + client.stopAndCleanupCluster(clusterId); + } catch (Exception e) { + throw new FlinkException("Could not kill Kubernetes cluster " + clusterId); + } + } + + @Override + public void close() { + try { + client.close(); + } catch (Exception e) { + LOG.error("failed to close client, exception {}", e.toString()); + } + } +} diff --git a/dinky-client/dinky-client-1.18/src/main/java/org/dinky/executor/AbstractCustomTableEnvironment.java b/dinky-client/dinky-client-1.18/src/main/java/org/dinky/executor/AbstractCustomTableEnvironment.java index 9b9fb5ed31..f2fbc8dcd4 100644 --- a/dinky-client/dinky-client-1.18/src/main/java/org/dinky/executor/AbstractCustomTableEnvironment.java +++ b/dinky-client/dinky-client-1.18/src/main/java/org/dinky/executor/AbstractCustomTableEnvironment.java @@ -24,9 +24,7 @@ import org.apache.flink.table.api.TableEnvironment; import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; import org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl; -import org.apache.flink.table.delegation.ExtendedOperationExecutor; import org.apache.flink.table.delegation.Planner; -import org.apache.flink.table.planner.delegation.PlannerBase; import cn.hutool.core.util.ReflectUtil; @@ -61,13 +59,7 @@ public void injectParser(CustomParser parser) { } @Override - public void injectExtendedExecutor(CustomExtendedOperationExecutor extendedExecutor) { - PlannerBase plannerBase = (PlannerBase) getPlanner(); - ExtendedOperationExecutor extendedOperationExecutor = - new ExtendedOperationExecutorWrapper(plannerBase.getExtendedOperationExecutor(), extendedExecutor); - - ReflectUtil.setFieldValue(getPlanner(), "extendedOperationExecutor", extendedOperationExecutor); - } + public void injectExtendedExecutor(CustomExtendedOperationExecutor extendedExecutor) {} @Override public Configuration getRootConfiguration() { diff --git a/dinky-client/dinky-client-1.18/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java b/dinky-client/dinky-client-1.18/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java index 6cc8e992ba..ff9506611d 100644 --- a/dinky-client/dinky-client-1.18/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java +++ b/dinky-client/dinky-client-1.18/src/main/java/org/dinky/executor/CustomTableEnvironmentImpl.java @@ -41,6 +41,7 @@ import org.apache.flink.table.api.TableException; import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; import org.apache.flink.table.api.internal.TableEnvironmentImpl; +import org.apache.flink.table.catalog.CatalogDescriptor; import org.apache.flink.table.operations.CreateTableASOperation; import org.apache.flink.table.operations.ExplainOperation; import org.apache.flink.table.operations.ModifyOperation; @@ -52,7 +53,9 @@ import org.apache.flink.table.operations.ddl.CreateTableOperation; import org.apache.flink.types.Row; +import java.io.File; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; @@ -104,6 +107,35 @@ public static CustomTableEnvironmentImpl create( return new CustomTableEnvironmentImpl(streamTableEnvironment); } + @Override + public void addJar(File... 
jarPath) { + Configuration configuration = this.getRootConfiguration(); + List jars = configuration.get(PipelineOptions.JARS); + if (jars == null) { + configuration.set( + PipelineOptions.JARS, + Arrays.stream(jarPath).map(File::getAbsolutePath).collect(Collectors.toList())); + } else { + CollUtil.addAll(jars, jarPath); + } + } + + @Override + public boolean parseAndLoadConfiguration(String statement, Map setMap) { + List operations = getParser().parse(statement); + for (Operation operation : operations) { + if (operation instanceof SetOperation) { + callSet((SetOperation) operation, getStreamExecutionEnvironment(), setMap); + return true; + } else if (operation instanceof ResetOperation) { + callReset((ResetOperation) operation, getStreamExecutionEnvironment(), setMap); + return true; + } + } + return false; + } + + @Override public ObjectNode getStreamGraph(String statement) { List operations = super.getParser().parse(statement); if (operations.size() != 1) { @@ -143,6 +175,7 @@ public JobPlanInfo getJobPlanInfo(List statements) { return new JobPlanInfo(JsonPlanGenerator.generatePlan(getJobGraphFromInserts(statements))); } + @Override public StreamGraph getStreamGraphFromInserts(List statements) { List modifyOperations = new ArrayList<>(); statements.stream().map(statement -> getParser().parse(statement)).forEach(operations -> { @@ -162,10 +195,12 @@ public StreamGraph getStreamGraphFromInserts(List statements) { return transOperatoinsToStreamGraph(modifyOperations); } + @Override public JobGraph getJobGraphFromInserts(List statements) { return getStreamGraphFromInserts(statements).getJobGraph(); } + @Override public SqlExplainResult explainSqlRecord(String statement, ExplainDetail... extraDetails) { List operations = getParser().parse(statement); if (operations.size() != 1) { @@ -195,20 +230,6 @@ public SqlExplainResult explainSqlRecord(String statement, ExplainDetail... extr return data; } - public boolean parseAndLoadConfiguration( - String statement, StreamExecutionEnvironment environment, Map setMap) { - for (Operation operation : getParser().parse(statement)) { - if (operation instanceof SetOperation) { - callSet((SetOperation) operation, environment, setMap); - return true; - } else if (operation instanceof ResetOperation) { - callReset((ResetOperation) operation, environment, setMap); - return true; - } - } - return false; - } - private void callSet( SetOperation setOperation, StreamExecutionEnvironment environment, Map setMap) { if (!setOperation.getKey().isPresent() || !setOperation.getValue().isPresent()) { @@ -270,4 +291,9 @@ public void executeCTAS(Operation operation) { getPlanner().translate(CollUtil.newArrayList(sinkModifyOperation)); } } + + @Override + public void createCatalog(String catalogName, CatalogDescriptor catalogDescriptor) { + getCatalogManager().createCatalog(catalogName, catalogDescriptor); + } } diff --git a/dinky-client/dinky-client-1.18/src/main/java/org/dinky/executor/ExtendedOperationExecutorWrapper.java b/dinky-client/dinky-client-1.18/src/main/java/org/dinky/executor/ExtendedOperationExecutorWrapper.java deleted file mode 100644 index 2e6d4c3a04..0000000000 --- a/dinky-client/dinky-client-1.18/src/main/java/org/dinky/executor/ExtendedOperationExecutorWrapper.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package org.dinky.executor; - -import org.apache.flink.table.api.internal.TableResultInternal; -import org.apache.flink.table.delegation.ExtendedOperationExecutor; -import org.apache.flink.table.operations.Operation; - -import java.util.Optional; - -public class ExtendedOperationExecutorWrapper implements ExtendedOperationExecutor { - - private ExtendedOperationExecutor extendedOperationExecutor; - - private final CustomExtendedOperationExecutor customOperationExecutor; - - public ExtendedOperationExecutorWrapper( - ExtendedOperationExecutor extendedOperationExecutor, - CustomExtendedOperationExecutor customOperationExecutor) { - this.extendedOperationExecutor = extendedOperationExecutor; - this.customOperationExecutor = customOperationExecutor; - } - - @Override - public Optional executeOperation(Operation operation) { - Optional customResult = - (Optional) customOperationExecutor.executeOperation(operation); - if (customResult.isPresent()) { - return customResult; - } - - return extendedOperationExecutor.executeOperation(operation); - } -} diff --git a/dinky-client/dinky-client-base/src/main/java/org/dinky/executor/CustomTableEnvironment.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/executor/CustomTableEnvironment.java index ff41b486ea..f2b1e67bdb 100644 --- a/dinky-client/dinky-client-base/src/main/java/org/dinky/executor/CustomTableEnvironment.java +++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/executor/CustomTableEnvironment.java @@ -35,6 +35,7 @@ import org.apache.flink.table.operations.Operation; import org.apache.flink.types.Row; +import java.io.File; import java.util.Collections; import java.util.List; import java.util.Map; @@ -59,7 +60,7 @@ public interface CustomTableEnvironment SqlExplainResult explainSqlRecord(String statement, ExplainDetail... extraDetails); - boolean parseAndLoadConfiguration(String statement, StreamExecutionEnvironment config, Map setMap); + boolean parseAndLoadConfiguration(String statement, Map setMap); StreamExecutionEnvironment getStreamExecutionEnvironment(); @@ -74,4 +75,6 @@ default List getLineage(String statement) { void createTemporaryView(String s, DataStream dataStream, List columnNameList); void executeCTAS(Operation operation); + + void addJar(File... 
jarPath); } diff --git a/dinky-executor/src/main/java/org/dinky/parser/BaseSingleSqlParser.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/parser/BaseSingleSqlParser.java similarity index 100% rename from dinky-executor/src/main/java/org/dinky/parser/BaseSingleSqlParser.java rename to dinky-client/dinky-client-base/src/main/java/org/dinky/parser/BaseSingleSqlParser.java diff --git a/dinky-executor/src/main/java/org/dinky/parser/CustomParserImpl.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/parser/CustomParserImpl.java similarity index 89% rename from dinky-executor/src/main/java/org/dinky/parser/CustomParserImpl.java rename to dinky-client/dinky-client-base/src/main/java/org/dinky/parser/CustomParserImpl.java index adb107bbfe..b96df52f8c 100644 --- a/dinky-executor/src/main/java/org/dinky/parser/CustomParserImpl.java +++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/parser/CustomParserImpl.java @@ -20,7 +20,10 @@ package org.dinky.parser; import org.dinky.executor.CustomParser; -import org.dinky.trans.CreateTemporalTableFunctionParseStrategy; +import org.dinky.trans.parse.AddJarSqlParseStrategy; +import org.dinky.trans.parse.CreateAggTableSelectSqlParseStrategy; +import org.dinky.trans.parse.CreateTemporalTableFunctionParseStrategy; +import org.dinky.trans.parse.SetSqlParseStrategy; import org.apache.calcite.sql.SqlNode; import org.apache.flink.table.delegation.Parser; @@ -104,8 +107,11 @@ public SqlNode validate(SqlNode sqlNode) { public static class DinkyExtendedParser extends ExtendedParser { public static final DinkyExtendedParser INSTANCE = new DinkyExtendedParser(); - private static final List PARSE_STRATEGIES = - Arrays.asList(CreateTemporalTableFunctionParseStrategy.INSTANCE); + private static final List PARSE_STRATEGIES = Arrays.asList( + AddJarSqlParseStrategy.INSTANCE, + CreateAggTableSelectSqlParseStrategy.INSTANCE, + SetSqlParseStrategy.INSTANCE, + CreateTemporalTableFunctionParseStrategy.INSTANCE); @Override public Optional parse(String statement) { diff --git a/dinky-executor/src/main/java/org/dinky/parser/DeleteSqlParser.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/parser/DeleteSqlParser.java similarity index 100% rename from dinky-executor/src/main/java/org/dinky/parser/DeleteSqlParser.java rename to dinky-client/dinky-client-base/src/main/java/org/dinky/parser/DeleteSqlParser.java diff --git a/dinky-executor/src/main/java/org/dinky/parser/InsertSelectSqlParser.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/parser/InsertSelectSqlParser.java similarity index 100% rename from dinky-executor/src/main/java/org/dinky/parser/InsertSelectSqlParser.java rename to dinky-client/dinky-client-base/src/main/java/org/dinky/parser/InsertSelectSqlParser.java diff --git a/dinky-executor/src/main/java/org/dinky/parser/InsertSqlParser.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/parser/InsertSqlParser.java similarity index 100% rename from dinky-executor/src/main/java/org/dinky/parser/InsertSqlParser.java rename to dinky-client/dinky-client-base/src/main/java/org/dinky/parser/InsertSqlParser.java diff --git a/dinky-executor/src/main/java/org/dinky/parser/SelectSqlParser.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/parser/SelectSqlParser.java similarity index 100% rename from dinky-executor/src/main/java/org/dinky/parser/SelectSqlParser.java rename to dinky-client/dinky-client-base/src/main/java/org/dinky/parser/SelectSqlParser.java diff --git 
a/dinky-executor/src/main/java/org/dinky/parser/SqlSegment.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/parser/SqlSegment.java similarity index 100% rename from dinky-executor/src/main/java/org/dinky/parser/SqlSegment.java rename to dinky-client/dinky-client-base/src/main/java/org/dinky/parser/SqlSegment.java diff --git a/dinky-executor/src/main/java/org/dinky/parser/SqlType.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/parser/SqlType.java similarity index 100% rename from dinky-executor/src/main/java/org/dinky/parser/SqlType.java rename to dinky-client/dinky-client-base/src/main/java/org/dinky/parser/SqlType.java diff --git a/dinky-executor/src/main/java/org/dinky/parser/UpdateSqlParser.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/parser/UpdateSqlParser.java similarity index 100% rename from dinky-executor/src/main/java/org/dinky/parser/UpdateSqlParser.java rename to dinky-client/dinky-client-base/src/main/java/org/dinky/parser/UpdateSqlParser.java diff --git a/dinky-executor/src/main/java/org/dinky/trans/AbstractOperation.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/AbstractOperation.java similarity index 100% rename from dinky-executor/src/main/java/org/dinky/trans/AbstractOperation.java rename to dinky-client/dinky-client-base/src/main/java/org/dinky/trans/AbstractOperation.java diff --git a/dinky-executor/src/main/java/org/dinky/trans/ExtendOperation.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/ExtendOperation.java similarity index 59% rename from dinky-executor/src/main/java/org/dinky/trans/ExtendOperation.java rename to dinky-client/dinky-client-base/src/main/java/org/dinky/trans/ExtendOperation.java index cce52707f3..3a2b6d0aa8 100644 --- a/dinky-executor/src/main/java/org/dinky/trans/ExtendOperation.java +++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/ExtendOperation.java @@ -19,14 +19,27 @@ package org.dinky.trans; -import org.dinky.executor.Executor; +import org.dinky.executor.CustomTableEnvironment; +import org.apache.flink.table.api.DataTypes; +import org.apache.flink.table.api.ResultKind; import org.apache.flink.table.api.TableResult; +import org.apache.flink.table.api.internal.TableResultImpl; +import org.apache.flink.table.catalog.Column; +import org.apache.flink.table.catalog.ResolvedSchema; import org.apache.flink.table.operations.Operation; +import org.apache.flink.types.Row; +import java.util.Collections; import java.util.Optional; /** */ public interface ExtendOperation extends Operation { - Optional execute(Executor executor); + Optional execute(CustomTableEnvironment tEnv); + + TableResult TABLE_RESULT_OK = TableResultImpl.builder() + .resultKind(ResultKind.SUCCESS) + .schema(ResolvedSchema.of(Column.physical("result", DataTypes.STRING()))) + .data(Collections.singletonList(Row.of("OK"))) + .build(); } diff --git a/dinky-executor/src/main/java/org/dinky/trans/ddl/AddJarOperation.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/ddl/AddJarOperation.java similarity index 69% rename from dinky-executor/src/main/java/org/dinky/trans/ddl/AddJarOperation.java rename to dinky-client/dinky-client-base/src/main/java/org/dinky/trans/ddl/AddJarOperation.java index ed1319a67a..a85e95fade 100644 --- a/dinky-executor/src/main/java/org/dinky/trans/ddl/AddJarOperation.java +++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/ddl/AddJarOperation.java @@ -20,15 +20,20 @@ package org.dinky.trans.ddl; import 
org.dinky.context.FlinkUdfPathContextHolder; -import org.dinky.executor.Executor; -import org.dinky.parser.check.AddJarSqlParser; +import org.dinky.executor.CustomTableEnvironment; import org.dinky.trans.AbstractOperation; -import org.dinky.trans.Operation; +import org.dinky.trans.ExtendOperation; +import org.dinky.trans.parse.AddJarSqlParseStrategy; import org.apache.flink.table.api.TableResult; -/** @since 0.7.0 */ -public class AddJarOperation extends AbstractOperation implements Operation { +import java.util.Arrays; +import java.util.Optional; + +/** + * @since 0.7.0 + */ +public class AddJarOperation extends AbstractOperation implements ExtendOperation { private static final String KEY_WORD = "ADD CUSTOMJAR"; @@ -39,21 +44,13 @@ public AddJarOperation(String statement) { public AddJarOperation() {} @Override - public String getHandle() { - return KEY_WORD; + public Optional execute(CustomTableEnvironment tEnv) { + Arrays.stream(AddJarSqlParseStrategy.getInfo(statement)).forEach(FlinkUdfPathContextHolder::addOtherPlugins); + return Optional.of(TABLE_RESULT_OK); } @Override - public Operation create(String statement) { - return new AddJarOperation(statement); - } - - @Override - public TableResult build(Executor executor) { - return null; - } - - public void init() { - AddJarSqlParser.getAllFilePath(statement).forEach(FlinkUdfPathContextHolder::addOtherPlugins); + public String asSummaryString() { + return statement; } } diff --git a/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/ddl/CreateAggTableOperation.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/ddl/CreateAggTableOperation.java new file mode 100644 index 0000000000..c1cf29df8f --- /dev/null +++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/ddl/CreateAggTableOperation.java @@ -0,0 +1,172 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.dinky.trans.ddl; + +import static org.apache.flink.table.api.Expressions.$; + +import org.dinky.executor.CustomTableEnvironment; +import org.dinky.trans.AbstractOperation; +import org.dinky.trans.ExtendOperation; +import org.dinky.trans.parse.CreateAggTableSelectSqlParseStrategy; + +import org.apache.commons.lang3.StringUtils; +import org.apache.flink.table.api.Table; +import org.apache.flink.table.api.TableResult; + +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import cn.hutool.core.collection.CollUtil; + +/** + * CreateAggTableOperation + * + * @since 2021/6/13 19:24 + */ +public class CreateAggTableOperation extends AbstractOperation implements ExtendOperation { + + public CreateAggTableOperation() {} + + public CreateAggTableOperation(String statement) { + super(statement); + } + + @Override + public Optional execute(CustomTableEnvironment tEnv) { + AggTable aggTable = CreateAggTableSelectSqlParseStrategy.getInfo(statement); + Table source = tEnv.sqlQuery("select * from " + aggTable.getTable()); + List wheres = aggTable.getWheres(); + if (wheres != null && CollUtil.isNotEmpty(wheres)) { + for (String s : wheres) { + source = source.filter($(s)); + } + } + Table sink = source.groupBy($(aggTable.getGroupBy())) + .flatAggregate($(aggTable.getAggBy())) + .select($(aggTable.getColumns())); + tEnv.createTemporaryView(aggTable.getName(), sink); + return Optional.of(TABLE_RESULT_OK); + } + + @Override + public String asSummaryString() { + return null; + } + + public static class AggTable { + + private String statement; + private String name; + private String columns; + private String table; + private List wheres; + private String groupBy; + private String aggBy; + + public AggTable( + String statement, + String name, + String columns, + String table, + List wheres, + String groupBy, + String aggBy) { + this.statement = statement; + this.name = name; + this.columns = columns; + this.table = table; + this.wheres = wheres; + this.groupBy = groupBy; + this.aggBy = aggBy; + } + + public static AggTable build(String statement, Map> map) { + return new AggTable( + statement, + getString(map, "CREATE AGGTABLE"), + getString(map, "SELECT"), + getString(map, "FROM"), + map.get("WHERE"), + getString(map, "GROUP BY"), + getString(map, "AGG BY")); + } + + private static String getString(Map> map, String key) { + return StringUtils.join(map.get(key), ","); + } + + public String getStatement() { + return statement; + } + + public void setStatement(String statement) { + this.statement = statement; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getColumns() { + return columns; + } + + public void setColumns(String columns) { + this.columns = columns; + } + + public String getTable() { + return table; + } + + public void setTable(String table) { + this.table = table; + } + + public List getWheres() { + return wheres; + } + + public void setWheres(List wheres) { + this.wheres = wheres; + } + + public String getGroupBy() { + return groupBy; + } + + public void setGroupBy(String groupBy) { + this.groupBy = groupBy; + } + + public String getAggBy() { + return aggBy; + } + + public void setAggBy(String aggBy) { + this.aggBy = aggBy; + } + } +} diff --git a/dinky-executor/src/main/java/org/dinky/trans/ddl/CreateTemporalTableFunctionOperation.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/ddl/CreateTemporalTableFunctionOperation.java similarity index 80% 
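A minimal sketch of how the new parse strategy and ExtendOperation are meant to compose, assuming a CustomTableEnvironment tEnv created elsewhere, a source table named scores already registered in it, and a table aggregate function TOP2; the class name AggTableSketch and those names are illustrative assumptions, and match(...) comes from Flink's AbstractRegexParseStrategy base class:

import org.dinky.executor.CustomTableEnvironment;
import org.dinky.trans.ExtendOperation;
import org.dinky.trans.parse.CreateAggTableSelectSqlParseStrategy;

import org.apache.flink.table.operations.Operation;

public class AggTableSketch {

    public static void run(CustomTableEnvironment tEnv) {
        String sql = "CREATE AGGTABLE top_scores AS SELECT cls, score, ranking FROM scores"
                + " GROUP BY cls AGG BY TOP2(score) AS (score, ranking)";
        // match(...) checks the statement against (create\s+aggtable)(.+)(as\s+select)(.+).
        if (CreateAggTableSelectSqlParseStrategy.INSTANCE.match(sql)) {
            // convert(...) wraps the raw statement in a CreateAggTableOperation.
            Operation op = CreateAggTableSelectSqlParseStrategy.INSTANCE.convert(sql);
            if (op instanceof ExtendOperation) {
                // CreateAggTableOperation.execute(...) builds the filter/groupBy/
                // flatAggregate/select chain and registers the temporary view top_scores.
                ((ExtendOperation) op).execute(tEnv);
            }
        }
    }
}

Registering the strategy in DinkyExtendedParser.PARSE_STRATEGIES (see the CustomParserImpl hunk above) is what lets such statements reach this operation through the regular Parser.parse(...) path.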
rename from dinky-executor/src/main/java/org/dinky/trans/ddl/CreateTemporalTableFunctionOperation.java rename to dinky-client/dinky-client-base/src/main/java/org/dinky/trans/ddl/CreateTemporalTableFunctionOperation.java index 52a0691795..57dee70441 100644 --- a/dinky-executor/src/main/java/org/dinky/trans/ddl/CreateTemporalTableFunctionOperation.java +++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/ddl/CreateTemporalTableFunctionOperation.java @@ -20,12 +20,9 @@ package org.dinky.trans.ddl; import org.dinky.executor.CustomTableEnvironment; -import org.dinky.executor.CustomTableEnvironmentImpl; -import org.dinky.executor.CustomTableResultImpl; -import org.dinky.executor.Executor; import org.dinky.trans.AbstractOperation; -import org.dinky.trans.CreateTemporalTableFunctionParseStrategy; import org.dinky.trans.ExtendOperation; +import org.dinky.trans.parse.CreateTemporalTableFunctionParseStrategy; import org.apache.flink.table.api.TableResult; import org.apache.flink.table.expressions.Expression; @@ -48,22 +45,19 @@ public String asSummaryString() { } @Override - public Optional execute(Executor executor) { + public Optional execute(CustomTableEnvironment tEnv) { TemporalTable temporalTable = TemporalTable.build(statement); - CustomTableEnvironment env = executor.getCustomTableEnvironment(); - CustomTableEnvironmentImpl customTableEnvironmentImpl = ((CustomTableEnvironmentImpl) env); Expression timeColumn = new ValueLiteralExpression(temporalTable.getTimeColumn()); Expression targetColumn = new ValueLiteralExpression(temporalTable.getTargetColumn()); - TemporalTableFunction ttf = customTableEnvironmentImpl - .from(temporalTable.getTableName()) - .createTemporalTableFunction(timeColumn, targetColumn); + TemporalTableFunction ttf = + tEnv.from(temporalTable.getTableName()).createTemporalTableFunction(timeColumn, targetColumn); - if (temporalTable.getFunctionType().equalsIgnoreCase("TEMPORARY SYSTEM")) { - customTableEnvironmentImpl.createTemporarySystemFunction(temporalTable.getFunctionName(), ttf); + if ("TEMPORARY SYSTEM".equalsIgnoreCase(temporalTable.getFunctionType())) { + tEnv.createTemporarySystemFunction(temporalTable.getFunctionName(), ttf); } else { - customTableEnvironmentImpl.createTemporaryFunction(temporalTable.getFunctionName(), ttf); + tEnv.createTemporaryFunction(temporalTable.getFunctionName(), ttf); } - return Optional.of(CustomTableResultImpl.TABLE_RESULT_OK); + return Optional.of(TABLE_RESULT_OK); } public static class TemporalTable { @@ -84,7 +78,7 @@ private TemporalTable( String targetColumn, String tableName) { this.functionType = functionType; - this.exists = exists.trim().toUpperCase().equals("IF NOT EXISTS"); + this.exists = "IF NOT EXISTS".equalsIgnoreCase(exists.trim()); this.statement = statement; this.functionName = functionName; this.tableName = tableName; diff --git a/dinky-executor/src/main/java/org/dinky/trans/ddl/PrintTableOperation.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/ddl/PrintTableOperation.java similarity index 76% rename from dinky-executor/src/main/java/org/dinky/trans/ddl/PrintTableOperation.java rename to dinky-client/dinky-client-base/src/main/java/org/dinky/trans/ddl/PrintTableOperation.java index ce00d6093c..31a21e6d55 100644 --- a/dinky-executor/src/main/java/org/dinky/trans/ddl/PrintTableOperation.java +++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/ddl/PrintTableOperation.java @@ -19,25 +19,24 @@ package org.dinky.trans.ddl; -import org.dinky.executor.Executor; +import 
org.dinky.executor.CustomTableEnvironment; import org.dinky.trans.AbstractOperation; -import org.dinky.trans.Operation; +import org.dinky.trans.ExtendOperation; import org.apache.flink.table.api.TableResult; -public class PrintTableOperation extends AbstractOperation implements Operation { - @Override - public String getHandle() { - return "Print"; - } +import java.util.Optional; + +public class PrintTableOperation extends AbstractOperation implements ExtendOperation { + public PrintTableOperation() {} @Override - public Operation create(String statement) { - return null; + public Optional execute(CustomTableEnvironment tEnv) { + return Optional.empty(); } @Override - public TableResult build(Executor executor) { + public String asSummaryString() { return null; } } diff --git a/dinky-executor/src/main/java/org/dinky/trans/ddl/SetOperation.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/ddl/SetOperation.java similarity index 64% rename from dinky-executor/src/main/java/org/dinky/trans/ddl/SetOperation.java rename to dinky-client/dinky-client-base/src/main/java/org/dinky/trans/ddl/SetOperation.java index 0bd7de8bb8..b1229f0b90 100644 --- a/dinky-executor/src/main/java/org/dinky/trans/ddl/SetOperation.java +++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/ddl/SetOperation.java @@ -20,27 +20,27 @@ package org.dinky.trans.ddl; import org.dinky.assertion.Asserts; -import org.dinky.executor.Executor; -import org.dinky.parser.SingleSqlParserFactory; +import org.dinky.executor.CustomTableEnvironment; import org.dinky.trans.AbstractOperation; -import org.dinky.trans.Operation; +import org.dinky.trans.ExtendOperation; +import org.dinky.trans.parse.SetSqlParseStrategy; import org.apache.commons.lang3.StringUtils; import org.apache.flink.configuration.Configuration; +import org.apache.flink.table.api.TableConfig; import org.apache.flink.table.api.TableResult; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; /** * SetOperation * * @since 2021/10/21 19:56 */ -public class SetOperation extends AbstractOperation implements Operation { - - private static final String KEY_WORD = "SET"; +public class SetOperation extends AbstractOperation implements ExtendOperation { public SetOperation() {} @@ -49,34 +49,30 @@ public SetOperation(String statement) { } @Override - public String getHandle() { - return KEY_WORD; - } - - @Override - public Operation create(String statement) { - return new SetOperation(statement); - } - - @Override - public TableResult build(Executor executor) { + public Optional execute(CustomTableEnvironment tEnv) { try { if (null != Class.forName("org.apache.log4j.Logger")) { - executor.parseAndLoadConfiguration(statement); - return null; + tEnv.parseAndLoadConfiguration(statement, new HashMap<>()); + return Optional.of(TABLE_RESULT_OK); } } catch (ClassNotFoundException e) { - e.printStackTrace(); + logger.error("Class not found: org.apache.log4j.Logger"); } - Map> map = SingleSqlParserFactory.generateParser(statement); + Map> map = SetSqlParseStrategy.getInfo(statement); if (Asserts.isNotNullMap(map) && map.size() == 2) { Map confMap = new HashMap<>(); confMap.put(StringUtils.join(map.get("SET"), "."), StringUtils.join(map.get("="), ",")); - executor.getCustomTableEnvironment().getConfig().addConfiguration(Configuration.fromMap(confMap)); + TableConfig config = tEnv.getConfig(); + config.addConfiguration(Configuration.fromMap(confMap)); Configuration configuration = Configuration.fromMap(confMap); - 
executor.getExecutionConfig().configure(configuration, null); - executor.getCustomTableEnvironment().getConfig().addConfiguration(configuration); + tEnv.getStreamExecutionEnvironment().getConfig().configure(configuration, null); + config.addConfiguration(configuration); } + return Optional.of(TABLE_RESULT_OK); + } + + @Override + public String asSummaryString() { return null; } } diff --git a/dinky-executor/src/main/java/org/dinky/trans/dml/ExecuteJarOperation.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/dml/ExecuteJarOperation.java similarity index 85% rename from dinky-executor/src/main/java/org/dinky/trans/dml/ExecuteJarOperation.java rename to dinky-client/dinky-client-base/src/main/java/org/dinky/trans/dml/ExecuteJarOperation.java index 3f1cf4c6e8..30c92b861b 100644 --- a/dinky-executor/src/main/java/org/dinky/trans/dml/ExecuteJarOperation.java +++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/dml/ExecuteJarOperation.java @@ -19,11 +19,10 @@ package org.dinky.trans.dml; -import org.dinky.executor.CustomTableResultImpl; -import org.dinky.executor.Executor; +import org.dinky.executor.CustomTableEnvironment; import org.dinky.trans.AbstractOperation; -import org.dinky.trans.ExecuteJarParseStrategy; import org.dinky.trans.ExtendOperation; +import org.dinky.trans.parse.ExecuteJarParseStrategy; import org.dinky.utils.RunTimeUtil; import org.apache.flink.api.dag.Pipeline; @@ -51,17 +50,17 @@ public ExecuteJarOperation(String statement) { } @Override - public Optional execute(Executor executor) { + public Optional execute(CustomTableEnvironment tEnv) { try { - executor.getStreamExecutionEnvironment().execute(getStreamGraph(executor)); + tEnv.getStreamExecutionEnvironment().execute(getStreamGraph(tEnv)); } catch (Exception e) { throw new RuntimeException(e); } - return Optional.of(CustomTableResultImpl.TABLE_RESULT_OK); + return Optional.of(TABLE_RESULT_OK); } - protected StreamGraph getStreamGraph(Executor executor) { + protected StreamGraph getStreamGraph(CustomTableEnvironment tEnv) { JarSubmitParam submitParam = JarSubmitParam.build(statement); SavepointRestoreSettings savepointRestoreSettings = StrUtil.isBlank(submitParam.getSavepointPath()) ? 
SavepointRestoreSettings.none() @@ -69,7 +68,7 @@ protected StreamGraph getStreamGraph(Executor executor) { submitParam.getSavepointPath(), submitParam.getAllowNonRestoredState()); PackagedProgram program; try { - Configuration configuration = executor.getTableConfig().getConfiguration(); + Configuration configuration = tEnv.getConfig().getConfiguration(); File file = FileUtil.file(submitParam.getUri()); program = PackagedProgram.newBuilder() .setJarFile(file) @@ -78,7 +77,7 @@ protected StreamGraph getStreamGraph(Executor executor) { .setSavepointRestoreSettings(savepointRestoreSettings) .setArguments(RunTimeUtil.handleCmds(submitParam.getArgs())) .build(); - executor.addJar(file); + tEnv.addJar(file); Pipeline pipeline = PackagedProgramUtils.getPipelineFromProgram(program, configuration, 1, true); Assert.isTrue(pipeline instanceof StreamGraph, "can not translate"); return (StreamGraph) pipeline; @@ -92,8 +91,8 @@ public String asSummaryString() { return statement; } - public StreamGraph explain(Executor executor) { - return getStreamGraph(executor); + public StreamGraph explain(CustomTableEnvironment tEnv) { + return getStreamGraph(tEnv); } @Setter diff --git a/dinky-executor/src/main/java/org/dinky/parser/check/AddJarSqlParser.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/parse/AddJarSqlParseStrategy.java similarity index 74% rename from dinky-executor/src/main/java/org/dinky/parser/check/AddJarSqlParser.java rename to dinky-client/dinky-client-base/src/main/java/org/dinky/trans/parse/AddJarSqlParseStrategy.java index 2044cda9a1..982b71099e 100644 --- a/dinky-executor/src/main/java/org/dinky/parser/check/AddJarSqlParser.java +++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/parse/AddJarSqlParseStrategy.java @@ -17,11 +17,15 @@ * */ -package org.dinky.parser.check; +package org.dinky.trans.parse; import org.dinky.data.exception.DinkyException; +import org.dinky.trans.ddl.AddJarOperation; import org.dinky.utils.URLUtils; +import org.apache.flink.table.operations.Operation; +import org.apache.flink.table.planner.parse.AbstractRegexParseStrategy; + import java.io.File; import java.util.HashSet; import java.util.List; @@ -33,11 +37,22 @@ import cn.hutool.core.util.ReUtil; import cn.hutool.core.util.StrUtil; -/** @since 0.7.0 */ -public class AddJarSqlParser { +/** + * @since 0.7.0 + */ +public class AddJarSqlParseStrategy extends AbstractRegexParseStrategy { private static final String ADD_JAR = "(add\\s+customjar)\\s+'(.*.jar)'"; private static final Pattern ADD_JAR_PATTERN = Pattern.compile(ADD_JAR, Pattern.CASE_INSENSITIVE); + public static final AddJarSqlParseStrategy INSTANCE = new AddJarSqlParseStrategy(); + + protected AddJarSqlParseStrategy() { + super(ADD_JAR_PATTERN); + } + + public static File[] getInfo(String statement) { + return getAllFilePath(statement).toArray(new File[0]); + } protected static List patternStatements(String[] statements) { return Stream.of(statements) @@ -46,7 +61,7 @@ protected static List patternStatements(String[] statements) { .collect(Collectors.toList()); } - public static Set getAllFilePath(String[] statements) { + public static Set getAllFilePath(String... 
statements) { Set fileSet = new HashSet<>(); patternStatements(statements).stream() .map(x -> ReUtil.findAll(ADD_JAR_PATTERN, x, 2).get(0)) @@ -68,4 +83,14 @@ public static Set getAllFilePath(String[] statements) { public static Set getAllFilePath(String statements) { return getAllFilePath(new String[] {statements}); } + + @Override + public Operation convert(String statement) { + return new AddJarOperation(statement); + } + + @Override + public String[] getHints() { + return new String[0]; + } } diff --git a/dinky-executor/src/main/java/org/dinky/parser/CreateAggTableSelectSqlParser.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/parse/CreateAggTableSelectSqlParseStrategy.java similarity index 55% rename from dinky-executor/src/main/java/org/dinky/parser/CreateAggTableSelectSqlParser.java rename to dinky-client/dinky-client-base/src/main/java/org/dinky/trans/parse/CreateAggTableSelectSqlParseStrategy.java index dc498d1a4e..da89e86b85 100644 --- a/dinky-executor/src/main/java/org/dinky/parser/CreateAggTableSelectSqlParser.java +++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/parse/CreateAggTableSelectSqlParseStrategy.java @@ -17,21 +17,37 @@ * */ -package org.dinky.parser; +package org.dinky.trans.parse; + +import org.dinky.parser.SqlSegment; +import org.dinky.trans.ddl.CreateAggTableOperation; +import org.dinky.utils.SqlSegmentUtil; + +import org.apache.flink.table.operations.Operation; +import org.apache.flink.table.planner.parse.AbstractRegexParseStrategy; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.regex.Pattern; /** * CreateAggTableSelectSqlParser * * @since 2021/6/14 16:56 */ -public class CreateAggTableSelectSqlParser extends BaseSingleSqlParser { +public class CreateAggTableSelectSqlParseStrategy extends AbstractRegexParseStrategy { + private static final String PATTERN_STR = "(create\\s+aggtable)(.+)(as\\s+select)(.+)"; + private static final Pattern PATTERN = Pattern.compile(PATTERN_STR, Pattern.CASE_INSENSITIVE | Pattern.DOTALL); - public CreateAggTableSelectSqlParser(String originalSql) { - super(originalSql); + public static final CreateAggTableSelectSqlParseStrategy INSTANCE = new CreateAggTableSelectSqlParseStrategy(); + + public CreateAggTableSelectSqlParseStrategy() { + super(PATTERN); } - @Override - protected void initializeSegments() { + public static CreateAggTableOperation.AggTable getInfo(String statement) { + List segments = new ArrayList<>(); segments.add(new SqlSegment("(create\\s+aggtable)(.+)(as\\s+select)", "[,]")); segments.add(new SqlSegment("(select)(.+)(from)", "[,]")); segments.add(new SqlSegment( @@ -42,5 +58,17 @@ protected void initializeSegments() { segments.add(new SqlSegment("(group\\s+by)(.+?)( order\\s+by | agg\\s+by | ENDOFSQL)", "[,]")); segments.add(new SqlSegment("(order\\s+by)(.+?)( agg\\s+by | ENDOFSQL)", "[,]")); segments.add(new SqlSegment("(agg\\s+by)(.+?)( ENDOFSQL)", "[,]")); + Map> splitSql2Segment = SqlSegmentUtil.splitSql2Segment(segments, statement); + return CreateAggTableOperation.AggTable.build(statement, splitSql2Segment); + } + + @Override + public Operation convert(String statement) { + return new CreateAggTableOperation(statement); + } + + @Override + public String[] getHints() { + return new String[0]; } } diff --git a/dinky-executor/src/main/java/org/dinky/trans/CreateTemporalTableFunctionParseStrategy.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/parse/CreateTemporalTableFunctionParseStrategy.java similarity index 
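A minimal sketch of the ADD CUSTOMJAR side of the same refactor, assuming a jar actually exists at the illustrative path used below; the class name AddJarSketch and the path are assumptions:

import org.dinky.trans.parse.AddJarSqlParseStrategy;

import java.io.File;
import java.util.Set;

public class AddJarSketch {

    public static void main(String[] args) {
        // The quoted path is captured by the (add\s+customjar)\s+'(.*.jar)' pattern.
        String sql = "ADD CUSTOMJAR '/opt/dinky/extends/demo-udf.jar'";
        Set<File> jars = AddJarSqlParseStrategy.getAllFilePath(sql);
        jars.forEach(jar -> System.out.println(jar.getAbsolutePath()));
    }
}

The same getAllFilePath(...) entry point is what Explainer and JobManager call further down, while AddJarOperation goes through getInfo(...) and FlinkUdfPathContextHolder when the statement is executed as an ExtendOperation.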
94% rename from dinky-executor/src/main/java/org/dinky/trans/CreateTemporalTableFunctionParseStrategy.java rename to dinky-client/dinky-client-base/src/main/java/org/dinky/trans/parse/CreateTemporalTableFunctionParseStrategy.java index fb065aa239..2d384cf071 100644 --- a/dinky-executor/src/main/java/org/dinky/trans/CreateTemporalTableFunctionParseStrategy.java +++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/parse/CreateTemporalTableFunctionParseStrategy.java @@ -17,7 +17,7 @@ * */ -package org.dinky.trans; +package org.dinky.trans.parse; import org.dinky.trans.ddl.CreateTemporalTableFunctionOperation; @@ -51,8 +51,9 @@ protected CreateTemporalTableFunctionParseStrategy() { public static String[] getInfo(String statement) { Matcher matcher = PATTERN.matcher(statement); - matcher.find(); - + if (!matcher.find()) { + throw new IllegalArgumentException("Invalid statement: " + statement); + } String functionType = matcher.group(1) == null ? "" : matcher.group(1).trim(); String exist = matcher.group(2) == null ? "" : matcher.group(2).trim(); String functionName = matcher.group(3).trim(); diff --git a/dinky-executor/src/main/java/org/dinky/trans/ExecuteJarParseStrategy.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/parse/ExecuteJarParseStrategy.java similarity index 98% rename from dinky-executor/src/main/java/org/dinky/trans/ExecuteJarParseStrategy.java rename to dinky-client/dinky-client-base/src/main/java/org/dinky/trans/parse/ExecuteJarParseStrategy.java index 692e2bd798..353fb3bcaf 100644 --- a/dinky-executor/src/main/java/org/dinky/trans/ExecuteJarParseStrategy.java +++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/parse/ExecuteJarParseStrategy.java @@ -17,7 +17,7 @@ * */ -package org.dinky.trans; +package org.dinky.trans.parse; import org.dinky.parser.SqlSegment; import org.dinky.trans.dml.ExecuteJarOperation; diff --git a/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/parse/SetSqlParseStrategy.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/parse/SetSqlParseStrategy.java new file mode 100644 index 0000000000..cd5fa3f8c2 --- /dev/null +++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/trans/parse/SetSqlParseStrategy.java @@ -0,0 +1,67 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.dinky.trans.parse; + +import org.dinky.parser.SqlSegment; +import org.dinky.trans.ddl.SetOperation; +import org.dinky.utils.SqlSegmentUtil; + +import org.apache.flink.table.operations.Operation; +import org.apache.flink.table.planner.parse.AbstractRegexParseStrategy; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.regex.Pattern; + +/** + * SetSqlParser + * + * @since 2021/10/21 18:41 + */ +public class SetSqlParseStrategy extends AbstractRegexParseStrategy { + // private static final String PATTERN_STR = + // "SET(\\s+(?[^'\\s]+)\\s*=\\s*('(?[^']*)'|(?\\S+)))?"; + private static final String PATTERN_STR = "(set)(.+)"; + private static final Pattern PATTERN = Pattern.compile(PATTERN_STR, Pattern.CASE_INSENSITIVE | Pattern.DOTALL); + public static final SetSqlParseStrategy INSTANCE = new SetSqlParseStrategy(); + + public SetSqlParseStrategy() { + super(PATTERN); + } + + public static Map> getInfo(String statement) { + // SET(\s+(\S+)\s*=(.*))? + List segments = new ArrayList<>(); + segments.add(new SqlSegment("(set)\\s+(.+)(\\s*=)", "[.]")); + segments.add(new SqlSegment("(=)\\s*(.*)( ENDOFSQL)", ",")); + return SqlSegmentUtil.splitSql2Segment(segments, statement); + } + + @Override + public Operation convert(String statement) { + return new SetOperation(statement); + } + + @Override + public String[] getHints() { + return new String[0]; + } +} diff --git a/dinky-client/dinky-client-base/src/main/java/org/dinky/utils/SqlSegmentUtil.java b/dinky-client/dinky-client-base/src/main/java/org/dinky/utils/SqlSegmentUtil.java new file mode 100644 index 0000000000..2d841a6d7b --- /dev/null +++ b/dinky-client/dinky-client-base/src/main/java/org/dinky/utils/SqlSegmentUtil.java @@ -0,0 +1,40 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.dinky.utils; + +import org.dinky.assertion.Asserts; +import org.dinky.parser.SqlSegment; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class SqlSegmentUtil { + public static Map> splitSql2Segment(List segments, String statement) { + Map> map = new HashMap<>(); + for (SqlSegment sqlSegment : segments) { + sqlSegment.parse(statement); + if (Asserts.isNotNullString(sqlSegment.getStart())) { + map.put(sqlSegment.getType().toUpperCase(), sqlSegment.getBodyPieces()); + } + } + return map; + } +} diff --git a/dinky-core/src/main/java/org/dinky/explainer/Explainer.java b/dinky-core/src/main/java/org/dinky/explainer/Explainer.java index e14cc74296..b90eab072d 100644 --- a/dinky-core/src/main/java/org/dinky/explainer/Explainer.java +++ b/dinky-core/src/main/java/org/dinky/explainer/Explainer.java @@ -39,8 +39,8 @@ import org.dinky.job.JobParam; import org.dinky.job.StatementParam; import org.dinky.parser.SqlType; -import org.dinky.parser.check.AddJarSqlParser; import org.dinky.trans.Operations; +import org.dinky.trans.parse.AddJarSqlParseStrategy; import org.dinky.utils.DinkyClassLoaderUtil; import org.dinky.utils.LogUtil; import org.dinky.utils.SqlUtil; @@ -116,7 +116,7 @@ public JobParam pretreatStatements(String[] statements) { } SqlType operationType = Operations.getOperationType(statement); if (operationType.equals(SqlType.ADD)) { - AddJarSqlParser.getAllFilePath(statement).forEach(FlinkUdfPathContextHolder::addOtherPlugins); + AddJarSqlParseStrategy.getAllFilePath(statement).forEach(FlinkUdfPathContextHolder::addOtherPlugins); DinkyClassLoaderContextHolder.get() .addURL(URLUtils.getURLs(FlinkUdfPathContextHolder.getOtherPluginsFiles())); } else if (operationType.equals(SqlType.ADD_JAR)) { diff --git a/dinky-core/src/main/java/org/dinky/job/JobManager.java b/dinky-core/src/main/java/org/dinky/job/JobManager.java index 70ba02f022..19995fff7f 100644 --- a/dinky-core/src/main/java/org/dinky/job/JobManager.java +++ b/dinky-core/src/main/java/org/dinky/job/JobManager.java @@ -60,10 +60,10 @@ import org.dinky.interceptor.FlinkInterceptor; import org.dinky.interceptor.FlinkInterceptorResult; import org.dinky.parser.SqlType; -import org.dinky.parser.check.AddJarSqlParser; -import org.dinky.trans.ExecuteJarParseStrategy; import org.dinky.trans.Operations; import org.dinky.trans.dml.ExecuteJarOperation; +import org.dinky.trans.parse.AddJarSqlParseStrategy; +import org.dinky.trans.parse.ExecuteJarParseStrategy; import org.dinky.utils.DinkyClassLoaderUtil; import org.dinky.utils.JsonUtils; import org.dinky.utils.LogUtil; @@ -331,11 +331,11 @@ public StreamGraph getJarStreamGraph(String statement) throws Exception { } SqlType operationType = Operations.getOperationType(statement); if (operationType.equals(SqlType.ADD)) { - AddJarSqlParser.getAllFilePath(statement).forEach(executor::addJar); + AddJarSqlParseStrategy.getAllFilePath(statement).forEach(executor::addJar); } } Assert.notNull(executeJarOperation, () -> new DinkyException("Not found execute jar operation.")); - return executeJarOperation.explain(executor); + return executeJarOperation.explain(executor.getCustomTableEnvironment()); } @ProcessStep(type = ProcessStepType.SUBMIT_EXECUTE) diff --git a/dinky-executor/src/main/java/org/dinky/executor/CustomExtendedOperationExecutorImpl.java b/dinky-executor/src/main/java/org/dinky/executor/CustomExtendedOperationExecutorImpl.java index ed366137d7..5749bc6043 100644 --- 
a/dinky-executor/src/main/java/org/dinky/executor/CustomExtendedOperationExecutorImpl.java +++ b/dinky-executor/src/main/java/org/dinky/executor/CustomExtendedOperationExecutorImpl.java @@ -28,17 +28,17 @@ public class CustomExtendedOperationExecutorImpl implements CustomExtendedOperationExecutor { - private Executor executor; + private CustomTableEnvironment tEnv; - public CustomExtendedOperationExecutorImpl(Executor executor) { - this.executor = executor; + public CustomExtendedOperationExecutorImpl(CustomTableEnvironment tEnv) { + this.tEnv = tEnv; } @Override public Optional executeOperation(Operation operation) { if (operation instanceof ExtendOperation) { ExtendOperation extendOperation = (ExtendOperation) operation; - return extendOperation.execute(executor); + return extendOperation.execute(tEnv); } return Optional.empty(); diff --git a/dinky-executor/src/main/java/org/dinky/executor/Executor.java b/dinky-executor/src/main/java/org/dinky/executor/Executor.java index 3c60259c49..f7e6391dd2 100644 --- a/dinky-executor/src/main/java/org/dinky/executor/Executor.java +++ b/dinky-executor/src/main/java/org/dinky/executor/Executor.java @@ -137,7 +137,8 @@ protected void init() { CustomTableEnvironmentContext.set(tableEnvironment); tableEnvironment.injectParser( new CustomParserImpl(tableEnvironment.getPlanner().getParser())); - tableEnvironment.injectExtendedExecutor(new CustomExtendedOperationExecutorImpl(this)); + tableEnvironment.injectExtendedExecutor( + new CustomExtendedOperationExecutorImpl(this.getCustomTableEnvironment())); Configuration configuration = tableEnvironment.getConfig().getConfiguration(); if (executorConfig.isValidJobName()) { @@ -295,7 +296,7 @@ public String explainStatementSet(List statements) { } public boolean parseAndLoadConfiguration(String statement) { - return tableEnvironment.parseAndLoadConfiguration(statement, environment, setConfig); + return tableEnvironment.parseAndLoadConfiguration(statement, setConfig); } public List getLineage(String statement) { diff --git a/dinky-executor/src/main/java/org/dinky/parser/SingleSqlParserFactory.java b/dinky-executor/src/main/java/org/dinky/parser/SingleSqlParserFactory.java index c3f5c7345a..c8840e5a8c 100644 --- a/dinky-executor/src/main/java/org/dinky/parser/SingleSqlParserFactory.java +++ b/dinky-executor/src/main/java/org/dinky/parser/SingleSqlParserFactory.java @@ -43,8 +43,6 @@ public static Map> generateParser(String sql) { sql = sql.replace("\r\n", " ").replace("\n", " ") + " ENDOFSQL"; if (contains(sql, "(insert\\s+into)(.+)(select)(.+)(from)(.+)")) { tmp = new InsertSelectSqlParser(sql); - } else if (contains(sql, "(create\\s+aggtable)(.+)(as\\s+select)(.+)")) { - tmp = new CreateAggTableSelectSqlParser(sql); } else if (contains(sql, "(execute\\s+cdcsource)")) { tmp = new CreateCDCSourceSqlParser(sql); } else if (contains(sql, "(select)(.+)(from)(.+)")) { @@ -55,8 +53,6 @@ public static Map> generateParser(String sql) { tmp = new UpdateSqlParser(sql); } else if (contains(sql, "(insert\\s+into)(.+)(values)(.+)")) { tmp = new InsertSqlParser(sql); - } else if (contains(sql, "(set)(.+)")) { - tmp = new SetSqlParser(sql); } else if (contains(sql, "(show\\s+fragment)\\s+(.+)")) { tmp = new ShowFragmentParser(sql); } @@ -74,7 +70,7 @@ public static Map> generateParser(String sql) { * * @param sql:要解析的sql语句 * @param regExp:正则表达式 - * @return + * @return boolean */ private static boolean contains(String sql, String regExp) { Pattern pattern = Pattern.compile(regExp, Pattern.CASE_INSENSITIVE); diff --git 
a/dinky-executor/src/main/java/org/dinky/trans/ddl/AggTable.java b/dinky-executor/src/main/java/org/dinky/trans/ddl/AggTable.java deleted file mode 100644 index d6cceed502..0000000000 --- a/dinky-executor/src/main/java/org/dinky/trans/ddl/AggTable.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package org.dinky.trans.ddl; - -import org.dinky.parser.SingleSqlParserFactory; - -import org.apache.commons.lang3.StringUtils; - -import java.util.List; -import java.util.Map; - -/** - * AggTable - * - * @since 2021/6/13 20:32 - */ -public class AggTable { - - private String statement; - private String name; - private String columns; - private String table; - private List wheres; - private String groupBy; - private String aggBy; - - public AggTable( - String statement, - String name, - String columns, - String table, - List wheres, - String groupBy, - String aggBy) { - this.statement = statement; - this.name = name; - this.columns = columns; - this.table = table; - this.wheres = wheres; - this.groupBy = groupBy; - this.aggBy = aggBy; - } - - public static AggTable build(String statement) { - Map> map = SingleSqlParserFactory.generateParser(statement); - return new AggTable( - statement, - getString(map, "CREATE AGGTABLE"), - getString(map, "SELECT"), - getString(map, "FROM"), - map.get("WHERE"), - getString(map, "GROUP BY"), - getString(map, "AGG BY")); - } - - private static String getString(Map> map, String key) { - return StringUtils.join(map.get(key), ","); - } - - public String getStatement() { - return statement; - } - - public void setStatement(String statement) { - this.statement = statement; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getColumns() { - return columns; - } - - public void setColumns(String columns) { - this.columns = columns; - } - - public String getTable() { - return table; - } - - public void setTable(String table) { - this.table = table; - } - - public List getWheres() { - return wheres; - } - - public void setWheres(List wheres) { - this.wheres = wheres; - } - - public String getGroupBy() { - return groupBy; - } - - public void setGroupBy(String groupBy) { - this.groupBy = groupBy; - } - - public String getAggBy() { - return aggBy; - } - - public void setAggBy(String aggBy) { - this.aggBy = aggBy; - } -} diff --git a/dinky-executor/src/main/java/org/dinky/trans/ddl/CreateAggTableOperation.java b/dinky-executor/src/main/java/org/dinky/trans/ddl/CreateAggTableOperation.java deleted file mode 100644 index 037d6a9f49..0000000000 --- a/dinky-executor/src/main/java/org/dinky/trans/ddl/CreateAggTableOperation.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * - * Licensed to the Apache Software 
Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package org.dinky.trans.ddl; - -import static org.apache.flink.table.api.Expressions.$; - -import org.dinky.executor.Executor; -import org.dinky.trans.AbstractOperation; -import org.dinky.trans.Operation; - -import org.apache.flink.table.api.Table; -import org.apache.flink.table.api.TableResult; - -import java.util.List; - -/** - * CreateAggTableOperation - * - * @since 2021/6/13 19:24 - */ -public class CreateAggTableOperation extends AbstractOperation implements Operation { - - private static final String KEY_WORD = "CREATE AGGTABLE"; - - public CreateAggTableOperation() {} - - public CreateAggTableOperation(String statement) { - super(statement); - } - - @Override - public String getHandle() { - return KEY_WORD; - } - - @Override - public Operation create(String statement) { - return new CreateAggTableOperation(statement); - } - - @Override - public TableResult build(Executor executor) { - AggTable aggTable = AggTable.build(statement); - Table source = executor.getCustomTableEnvironment().sqlQuery("select * from " + aggTable.getTable()); - List wheres = aggTable.getWheres(); - if (wheres != null && wheres.size() > 0) { - for (String s : wheres) { - source = source.filter($(s)); - } - } - Table sink = source.groupBy($(aggTable.getGroupBy())) - .flatAggregate($(aggTable.getAggBy())) - .select($(aggTable.getColumns())); - executor.getCustomTableEnvironment().registerTable(aggTable.getName(), sink); - return null; - } -} diff --git a/dinky-executor/src/main/java/org/dinky/trans/ddl/CreateCDCSourceOperation.java b/dinky-executor/src/main/java/org/dinky/trans/ddl/CreateCDCSourceOperation.java index e2dcbc4810..c38f716ccf 100644 --- a/dinky-executor/src/main/java/org/dinky/trans/ddl/CreateCDCSourceOperation.java +++ b/dinky-executor/src/main/java/org/dinky/trans/ddl/CreateCDCSourceOperation.java @@ -19,12 +19,11 @@ package org.dinky.trans.ddl; -import static org.dinky.cdc.SinkBuilderFactory.buildSinkBuilder; - import org.dinky.assertion.Asserts; import org.dinky.cdc.CDCBuilder; import org.dinky.cdc.CDCBuilderFactory; import org.dinky.cdc.SinkBuilder; +import org.dinky.cdc.SinkBuilderFactory; import org.dinky.data.model.FlinkCDCConfig; import org.dinky.data.model.Schema; import org.dinky.data.model.Table; @@ -98,7 +97,7 @@ public TableResult build(Executor executor) { CDCBuilder cdcBuilder = CDCBuilderFactory.buildCDCBuilder(config); Map> allConfigMap = cdcBuilder.parseMetaDataConfigs(); config.setSchemaFieldName(cdcBuilder.getSchemaFieldName()); - SinkBuilder sinkBuilder = buildSinkBuilder(config); + SinkBuilder sinkBuilder = SinkBuilderFactory.buildSinkBuilder(config); List schemaList = new ArrayList<>(); final List schemaNameList = cdcBuilder.getSchemaList(); final List tableRegList = cdcBuilder.getTableList(); diff --git 
a/dinky-executor/src/main/java/org/dinky/trans/ddl/ShowFragmentOperation.java b/dinky-executor/src/main/java/org/dinky/trans/show/ShowFragmentOperation.java similarity index 98% rename from dinky-executor/src/main/java/org/dinky/trans/ddl/ShowFragmentOperation.java rename to dinky-executor/src/main/java/org/dinky/trans/show/ShowFragmentOperation.java index 879e99452e..59983ef124 100644 --- a/dinky-executor/src/main/java/org/dinky/trans/ddl/ShowFragmentOperation.java +++ b/dinky-executor/src/main/java/org/dinky/trans/show/ShowFragmentOperation.java @@ -17,7 +17,7 @@ * */ -package org.dinky.trans.ddl; +package org.dinky.trans.show; import org.dinky.assertion.Asserts; import org.dinky.executor.Executor; diff --git a/dinky-executor/src/main/java/org/dinky/trans/ddl/ShowFragmentsOperation.java b/dinky-executor/src/main/java/org/dinky/trans/show/ShowFragmentsOperation.java similarity index 98% rename from dinky-executor/src/main/java/org/dinky/trans/ddl/ShowFragmentsOperation.java rename to dinky-executor/src/main/java/org/dinky/trans/show/ShowFragmentsOperation.java index 1dc2f52c74..af88e14f79 100644 --- a/dinky-executor/src/main/java/org/dinky/trans/ddl/ShowFragmentsOperation.java +++ b/dinky-executor/src/main/java/org/dinky/trans/show/ShowFragmentsOperation.java @@ -17,7 +17,7 @@ * */ -package org.dinky.trans.ddl; +package org.dinky.trans.show; import org.dinky.executor.Executor; import org.dinky.trans.AbstractOperation; diff --git a/dinky-executor/src/test/java/org/dinky/parser/SqlTypeTest.java b/dinky-executor/src/test/java/org/dinky/parse/SqlTypeTest.java similarity index 97% rename from dinky-executor/src/test/java/org/dinky/parser/SqlTypeTest.java rename to dinky-executor/src/test/java/org/dinky/parse/SqlTypeTest.java index 0476c81931..0c0dc2ee4e 100644 --- a/dinky-executor/src/test/java/org/dinky/parser/SqlTypeTest.java +++ b/dinky-executor/src/test/java/org/dinky/parse/SqlTypeTest.java @@ -17,7 +17,9 @@ * */ -package org.dinky.parser; +package org.dinky.parse; + +import org.dinky.parser.SqlType; import org.assertj.core.api.Assertions; import org.junit.jupiter.api.Test; diff --git a/dinky-executor/src/test/java/org/dinky/trans/CreateTemporalTableFunctionParseStrategyTest.java b/dinky-executor/src/test/java/org/dinky/trans/CreateTemporalTableFunctionParseStrategyTest.java index 2c4dcc1fe7..79ae3fd89d 100644 --- a/dinky-executor/src/test/java/org/dinky/trans/CreateTemporalTableFunctionParseStrategyTest.java +++ b/dinky-executor/src/test/java/org/dinky/trans/CreateTemporalTableFunctionParseStrategyTest.java @@ -19,6 +19,8 @@ package org.dinky.trans; +import org.dinky.trans.parse.CreateTemporalTableFunctionParseStrategy; + import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; diff --git a/dinky-flink/dinky-flink-1.18/pom.xml b/dinky-flink/dinky-flink-1.18/pom.xml index eba38bb7ab..6065f7a581 100644 --- a/dinky-flink/dinky-flink-1.18/pom.xml +++ b/dinky-flink/dinky-flink-1.18/pom.xml @@ -17,7 +17,7 @@ 1.3.1 17.0 - 1.17.1 + 1.18.0 2.3.0 @@ -92,7 +92,7 @@ org.apache.flink flink-connector-kafka - ${flink.version} + 3.0.1-1.18 org.apache.flink From 8dc2431e47a1a169fc24a18d6c4b63172da21c26 Mon Sep 17 00:00:00 2001 From: ZackYoung Date: Thu, 9 Nov 2023 12:29:42 +0800 Subject: [PATCH 20/21] fix_h2_init (#2511) --- .../org/dinky/aop/WebExceptionHandler.java | 6 +- .../java/org/dinky/configure/AppConfig.java | 31 ------- .../init/DatabasePopulatorUtils.java | 80 +++++++++++++++++++ .../src/main/resources/application-h2.yml | 4 +- .../src/main/resources/db/db-h2-ddl.sql | 80 
+++++++++---------- .../main/resources/mapper/UDFManageMapper.xml | 4 +- .../org/dinky/data/constant/DirConstant.java | 4 +- .../org/dinky/executor/VariableManager.java | 2 +- pom.xml | 4 +- 9 files changed, 131 insertions(+), 84 deletions(-) create mode 100644 dinky-admin/src/main/java/org/springframework/jdbc/datasource/init/DatabasePopulatorUtils.java diff --git a/dinky-admin/src/main/java/org/dinky/aop/WebExceptionHandler.java b/dinky-admin/src/main/java/org/dinky/aop/WebExceptionHandler.java index aa2a12b881..5dc5d97fdf 100644 --- a/dinky-admin/src/main/java/org/dinky/aop/WebExceptionHandler.java +++ b/dinky-admin/src/main/java/org/dinky/aop/WebExceptionHandler.java @@ -39,10 +39,9 @@ import org.springframework.validation.FieldError; import org.springframework.validation.ObjectError; import org.springframework.web.bind.MethodArgumentNotValidException; -import org.springframework.web.bind.annotation.ControllerAdvice; import org.springframework.web.bind.annotation.ExceptionHandler; -import org.springframework.web.bind.annotation.ResponseBody; import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestControllerAdvice; import org.springframework.web.context.request.RequestContextHolder; import org.springframework.web.context.request.ServletRequestAttributes; @@ -55,8 +54,7 @@ * * @since 2022/2/2 22:22 */ -@ControllerAdvice -@ResponseBody +@RestControllerAdvice public class WebExceptionHandler { private static final Logger logger = LoggerFactory.getLogger(WebExceptionHandler.class); diff --git a/dinky-admin/src/main/java/org/dinky/configure/AppConfig.java b/dinky-admin/src/main/java/org/dinky/configure/AppConfig.java index f483c64e7c..6f9e4561db 100644 --- a/dinky-admin/src/main/java/org/dinky/configure/AppConfig.java +++ b/dinky-admin/src/main/java/org/dinky/configure/AppConfig.java @@ -72,37 +72,6 @@ public LocaleChangeInterceptor localeChangeInterceptor() { public void addInterceptors(InterceptorRegistry registry) { registry.addInterceptor(localeChangeInterceptor()); // 注册Sa-Token的路由拦截器 - - // registry.addInterceptor(new SaInterceptor(handle -> { - // // 根据路由划分模块,不同模块不同鉴权 - // SaRouter.match("/api/alertGroup/**", r -> - // StpUtil.checkPermission("alertGroup")); - // SaRouter.match("/api/alertHistory/**", r -> - // StpUtil.checkPermission("alertHistory")); - // SaRouter.match("/api/alertInstance/**", r -> - // StpUtil.checkPermission("alertInstance")); - // SaRouter.match("/api/catalogue/**", r -> - // StpUtil.checkPermission("catalogue")); - // SaRouter.match("/api/clusterConfiguration/**", r -> - // StpUtil.checkPermission("clusterConfiguration")); - // SaRouter.match("/api/cluster/**", r -> - // StpUtil.checkPermission("clusterInstance")); - // SaRouter.match("/api/database/**", r -> - // StpUtil.checkPermission("database")); - // SaRouter.match("/api/document/**", r -> - // StpUtil.checkPermission("document")); - // SaRouter.match("/api/fragment/**", r -> - // StpUtil.checkPermission("fragment")); - // SaRouter.match("/api/git/**", r -> StpUtil.checkPermission("git")); - // SaRouter.match("/api/history/**", r -> - // StpUtil.checkPermission("history")); - // - // })) - // .addPathPatterns("/api/**") - // .excludePathPatterns("/api/login") - // .excludePathPatterns("/druid/**") - // .excludePathPatterns("/openapi/**"); - registry.addInterceptor(new SaInterceptor(handler -> StpUtil.checkLogin())) .addPathPatterns("/api/**") .excludePathPatterns( diff --git 
a/dinky-admin/src/main/java/org/springframework/jdbc/datasource/init/DatabasePopulatorUtils.java b/dinky-admin/src/main/java/org/springframework/jdbc/datasource/init/DatabasePopulatorUtils.java new file mode 100644 index 0000000000..f56cf000d2 --- /dev/null +++ b/dinky-admin/src/main/java/org/springframework/jdbc/datasource/init/DatabasePopulatorUtils.java @@ -0,0 +1,80 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.springframework.jdbc.datasource.init; + +import java.sql.Connection; + +import javax.sql.DataSource; + +import org.springframework.dao.DataAccessException; +import org.springframework.jdbc.datasource.DataSourceUtils; +import org.springframework.util.Assert; + +import cn.hutool.core.collection.CollUtil; +import cn.hutool.db.Db; + +/** + * Utility methods for executing a {@link DatabasePopulator}. + * + * @author Juergen Hoeller + * @author Oliver Gierke + * @author Sam Brannen + * @since 3.1 + */ +public abstract class DatabasePopulatorUtils { + private static boolean isInit = true; + + /** + * Execute the given {@link DatabasePopulator} against the given {@link DataSource}. + *
+ * <p>
                      As of Spring Framework 5.3.11, the {@link Connection} for the supplied + * {@code DataSource} will be {@linkplain Connection#commit() committed} if + * it is not configured for {@link Connection#getAutoCommit() auto-commit} and + * is not {@linkplain DataSourceUtils#isConnectionTransactional transactional}. + * + * @param populator the {@code DatabasePopulator} to execute + * @param dataSource the {@code DataSource} to execute against + * @throws DataAccessException if an error occurs, specifically a {@link ScriptException} + * @see DataSourceUtils#isConnectionTransactional(Connection, DataSource) + */ + public static void execute(DatabasePopulator populator, DataSource dataSource) throws DataAccessException { + Assert.notNull(populator, "DatabasePopulator must not be null"); + Assert.notNull(dataSource, "DataSource must not be null"); + try { + // Override this method and do not run it if the table exists(重写此方法,如果有表存在,则不运行) + if (isInit && CollUtil.isNotEmpty(Db.use(dataSource).query("show tables"))) { + return; + } + isInit = false; + Connection connection = DataSourceUtils.getConnection(dataSource); + try { + populator.populate(connection); + if (!connection.getAutoCommit() && !DataSourceUtils.isConnectionTransactional(connection, dataSource)) { + connection.commit(); + } + } finally { + DataSourceUtils.releaseConnection(connection, dataSource); + } + } catch (ScriptException ex) { + throw ex; + } catch (Throwable ex) { + throw new UncategorizedScriptException("Failed to execute database script", ex); + } + } +} diff --git a/dinky-admin/src/main/resources/application-h2.yml b/dinky-admin/src/main/resources/application-h2.yml index 37a3d158e6..5059a516df 100644 --- a/dinky-admin/src/main/resources/application-h2.yml +++ b/dinky-admin/src/main/resources/application-h2.yml @@ -31,5 +31,5 @@ spring: sql: init: schema-locations: classpath:db/db-h2-ddl.sql - mode: always - data-locations: classpath:db/db-h2-dml.sql \ No newline at end of file + data-locations: classpath:db/db-h2-dml.sql + mode: always \ No newline at end of file diff --git a/dinky-admin/src/main/resources/db/db-h2-ddl.sql b/dinky-admin/src/main/resources/db/db-h2-ddl.sql index 433035afae..698e2ecbb1 100644 --- a/dinky-admin/src/main/resources/db/db-h2-ddl.sql +++ b/dinky-admin/src/main/resources/db/db-h2-ddl.sql @@ -1,7 +1,7 @@ SET NAMES utf8mb4; SET FOREIGN_KEY_CHECKS = 0; -DROP TABLE IF EXISTS `dinky_alert_group`; + CREATE TABLE `dinky_alert_group` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT 'id', `name` varchar(50) NOT null COMMENT 'alert group name', @@ -12,7 +12,7 @@ CREATE TABLE `dinky_alert_group` ( `create_time` datetime(0) null DEFAULT null COMMENT 'create time', `update_time` datetime(0) null DEFAULT null COMMENT 'update time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_alert_history`; + CREATE TABLE `dinky_alert_history` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT 'id', `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', @@ -25,7 +25,7 @@ CREATE TABLE `dinky_alert_history` ( `create_time` datetime(0) null DEFAULT null COMMENT 'create time', `update_time` datetime(0) null DEFAULT null COMMENT 'update time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_alert_instance`; + CREATE TABLE `dinky_alert_instance` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT 'id', `name` varchar(50) NOT null COMMENT 'alert instance name', @@ -36,7 +36,7 @@ CREATE TABLE `dinky_alert_instance` ( `create_time` datetime(0) null DEFAULT null COMMENT 'create 
time', `update_time` datetime(0) null DEFAULT null COMMENT 'update time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_catalogue`; + CREATE TABLE `dinky_catalogue` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', @@ -49,7 +49,7 @@ CREATE TABLE `dinky_catalogue` ( `create_time` datetime(0) null DEFAULT null COMMENT 'create time', `update_time` datetime(0) null DEFAULT null COMMENT 'update time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_cluster`; + CREATE TABLE `dinky_cluster` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', @@ -68,7 +68,7 @@ CREATE TABLE `dinky_cluster` ( `create_time` datetime(0) null DEFAULT null COMMENT 'create time', `update_time` datetime(0) null DEFAULT null COMMENT 'update time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_cluster_configuration`; + CREATE TABLE `dinky_cluster_configuration` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', @@ -81,7 +81,7 @@ CREATE TABLE `dinky_cluster_configuration` ( `create_time` datetime(0) null DEFAULT null COMMENT 'create time', `update_time` datetime(0) null DEFAULT null COMMENT 'update time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_database`; + CREATE TABLE `dinky_database` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', @@ -104,7 +104,7 @@ CREATE TABLE `dinky_database` ( `create_time` datetime(0) null DEFAULT null COMMENT 'create time', `update_time` datetime(0) null DEFAULT null COMMENT 'update time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_flink_document`; + CREATE TABLE `dinky_flink_document` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT 'id', `category` varchar(255) null DEFAULT null COMMENT 'document category', @@ -120,7 +120,7 @@ CREATE TABLE `dinky_flink_document` ( `update_time` datetime(0) null DEFAULT null COMMENT 'update_time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_fragment`; + CREATE TABLE `dinky_fragment` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT 'id', `name` varchar(50) NOT null COMMENT 'fragment name', @@ -131,7 +131,7 @@ CREATE TABLE `dinky_fragment` ( `create_time` datetime(0) null DEFAULT null COMMENT 'create time', `update_time` datetime(0) null DEFAULT null COMMENT 'update time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_history`; + CREATE TABLE `dinky_history` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', @@ -154,7 +154,7 @@ CREATE TABLE `dinky_history` ( INDEX cluster_index2(`cluster_id`) ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_job_history`; + CREATE TABLE `dinky_job_history` ( `id` int(11) NOT null COMMENT 'id', `tenant_id` int(11) NOT null DEFAULT 1 COMMENT 'tenant id', @@ -167,7 +167,7 @@ CREATE TABLE `dinky_job_history` ( `cluster_configuration_json` json null COMMENT 'cluster config', `update_time` datetime(0) null DEFAULT null COMMENT 'update time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_job_instance`; + CREATE TABLE `dinky_job_instance` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT 'id', `name` varchar(255) null DEFAULT null COMMENT 'job instance name', @@ -187,7 +187,7 @@ CREATE TABLE `dinky_job_instance` ( INDEX 
job_instance_task_id_idx13(`task_id`) ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_role`; + CREATE TABLE `dinky_role` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', `tenant_id` int(11) NOT null COMMENT 'tenant id', @@ -200,7 +200,7 @@ CREATE TABLE `dinky_role` ( ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_savepoints`; + CREATE TABLE `dinky_savepoints` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', `task_id` int(11) NOT null COMMENT 'task ID', @@ -211,7 +211,7 @@ CREATE TABLE `dinky_savepoints` ( `create_time` datetime(0) null DEFAULT null COMMENT 'create time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_sys_config`; + CREATE TABLE `dinky_sys_config` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', `name` varchar(255) NOT null COMMENT 'configuration name', @@ -219,7 +219,7 @@ CREATE TABLE `dinky_sys_config` ( `create_time` datetime(0) null DEFAULT null COMMENT 'create time', `update_time` datetime(0) null DEFAULT null COMMENT 'update time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_task`; + CREATE TABLE `dinky_task` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', `name` varchar(255) NOT null COMMENT 'Job name', @@ -249,7 +249,7 @@ CREATE TABLE `dinky_task` ( `statement` text null DEFAULT null COMMENT 'statement' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_task_version`; + CREATE TABLE `dinky_task_version` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', `task_id` int(11) NOT null COMMENT 'task ID ', @@ -262,7 +262,7 @@ CREATE TABLE `dinky_task_version` ( `task_configure` text NOT null COMMENT 'task configuration', `create_time` datetime(0) null DEFAULT null COMMENT 'create time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_tenant`; + CREATE TABLE `dinky_tenant` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', `tenant_code` varchar(64) NOT null COMMENT 'tenant code', @@ -272,7 +272,7 @@ CREATE TABLE `dinky_tenant` ( `update_time` datetime(0) null DEFAULT null COMMENT 'update time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_udf`; + CREATE TABLE `dinky_udf` ( `id` int(11) NOT null AUTO_INCREMENT, `name` varchar(200) null DEFAULT null COMMENT 'udf name', @@ -292,7 +292,7 @@ CREATE TABLE `dinky_udf` ( `create_time` datetime(0) null DEFAULT null COMMENT 'create time', `update_time` datetime DEFAULT null ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_udf_template`; + CREATE TABLE `dinky_udf_template` ( `id` int(11) NOT null AUTO_INCREMENT, `name` varchar(100) null DEFAULT null COMMENT 'template name', @@ -305,7 +305,7 @@ CREATE TABLE `dinky_udf_template` ( ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_user`; + CREATE TABLE `dinky_user` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', `username` varchar(50) NOT null COMMENT 'username', @@ -322,7 +322,7 @@ CREATE TABLE `dinky_user` ( `update_time` datetime(0) null DEFAULT null COMMENT 'update time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_user_role`; + CREATE TABLE `dinky_user_role` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT 'ID', `user_id` int(11) NOT null COMMENT 'user id', @@ -331,7 +331,7 @@ CREATE TABLE `dinky_user_role` ( `update_time` datetime(0) null DEFAULT null COMMENT 'update time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `dinky_user_tenant`; + CREATE TABLE `dinky_user_tenant` ( `id` 
int(11) NOT null AUTO_INCREMENT COMMENT 'ID', `user_id` int(11) NOT null COMMENT 'user id', @@ -341,7 +341,7 @@ CREATE TABLE `dinky_user_tenant` ( `update_time` datetime(0) null DEFAULT null COMMENT 'update time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `metadata_column`; + CREATE TABLE `metadata_column` ( `column_name` varchar(255) NOT null COMMENT 'column name', `column_type` varchar(255) NOT null COMMENT 'column type, such as : Physical , Metadata , Computed , WATERMARK', @@ -353,7 +353,7 @@ CREATE TABLE `metadata_column` ( `update_time` datetime(0) null DEFAULT null COMMENT 'update time', `create_time` datetime(0) NOT null DEFAULT CURRENT_TIMESTAMP(0) COMMENT 'create time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `metadata_database`; + CREATE TABLE `metadata_database` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT 'id', `database_name` varchar(255) NOT null COMMENT 'database name', @@ -361,7 +361,7 @@ CREATE TABLE `metadata_database` ( `update_time` datetime(0) null DEFAULT null COMMENT 'update time', `create_time` datetime(0) null DEFAULT CURRENT_TIMESTAMP(0) COMMENT 'create time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `metadata_database_property`; + CREATE TABLE `metadata_database_property` ( `key` varchar(255) NOT null COMMENT 'key', `value` varchar(255) null DEFAULT null COMMENT 'value', @@ -369,7 +369,7 @@ CREATE TABLE `metadata_database_property` ( `update_time` datetime(0) null DEFAULT null COMMENT 'update time', `create_time` datetime(0) NOT null DEFAULT CURRENT_TIMESTAMP(0) COMMENT 'create time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `metadata_function`; + CREATE TABLE `metadata_function` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT '主键', `function_name` varchar(255) NOT null COMMENT 'function name', @@ -379,7 +379,7 @@ CREATE TABLE `metadata_function` ( `update_time` datetime(0) null DEFAULT null COMMENT 'update time', `create_time` datetime(0) null DEFAULT CURRENT_TIMESTAMP(0) COMMENT 'create time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `metadata_table`; + CREATE TABLE `metadata_table` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT '主键', `table_name` varchar(255) NOT null COMMENT 'table name', @@ -389,7 +389,7 @@ CREATE TABLE `metadata_table` ( `update_time` datetime(0) null DEFAULT null COMMENT 'update time', `create_time` datetime(0) null DEFAULT CURRENT_TIMESTAMP(0) COMMENT 'create time' ) ENGINE = InnoDB ROW_FORMAT = Dynamic; -DROP TABLE IF EXISTS `metadata_table_property`; + CREATE TABLE `metadata_table_property` ( `key` varchar(255) NOT null COMMENT 'key', `value` mediumtext null COMMENT 'value', @@ -403,7 +403,7 @@ CREATE TABLE `metadata_table_property` ( -- ---------------------------- -- Table structure for dinky_row_permissions -- ---------------------------- -DROP TABLE IF EXISTS `dinky_row_permissions`; + CREATE TABLE dinky_row_permissions ( id int PRIMARY KEY AUTO_INCREMENT COMMENT 'ID', role_id int NOT null COMMENT '角色ID', @@ -413,7 +413,7 @@ CREATE TABLE dinky_row_permissions ( update_time datetime null COMMENT '更新时间' ); SET FOREIGN_KEY_CHECKS = 1; -DROP TABLE IF EXISTS `dinky_git_project`; + CREATE TABLE `dinky_git_project` ( `id` bigint(20) NOT null AUTO_INCREMENT, `tenant_id` bigint(20) NOT null, @@ -438,7 +438,7 @@ CREATE TABLE `dinky_git_project` ( `update_time` datetime NOT null DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time' ) ENGINE = InnoDB; -DROP TABLE IF EXISTS dinky_metrics; + CREATE TABLE `dinky_metrics` ( 
`id` int(11) NOT null AUTO_INCREMENT, `task_id` int(255) DEFAULT null, @@ -453,7 +453,7 @@ CREATE TABLE `dinky_metrics` ( `update_time` datetime DEFAULT null ) ENGINE = InnoDB; -DROP TABLE IF EXISTS dinky_resources; + CREATE TABLE `dinky_resources` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT 'key', `file_name` varchar(64) DEFAULT null COMMENT 'file name', @@ -472,7 +472,7 @@ CREATE TABLE `dinky_resources` ( -- ---------------------------- -- Table structure for dinky_sys_login_log -- ---------------------------- -DROP TABLE IF EXISTS dinky_sys_login_log; + CREATE TABLE `dinky_sys_login_log` ( `id` int(11) NOT null AUTO_INCREMENT COMMENT 'key', `user_id` int(11) NOT null COMMENT 'user id', @@ -492,7 +492,7 @@ CREATE TABLE `dinky_sys_login_log` ( -- ---------------------------- -- Table structure for dinky_sys_operate_log -- ---------------------------- -DROP TABLE IF EXISTS `dinky_sys_operate_log`; + CREATE TABLE `dinky_sys_operate_log` ( `id` bigint NOT null AUTO_INCREMENT COMMENT 'id', `module_name` varchar(50) DEFAULT '' COMMENT 'module name', @@ -517,7 +517,7 @@ CREATE TABLE `dinky_sys_operate_log` ( -- ---------------------------- -- Table structure for dinky_sys_menu -- ---------------------------- -drop table if exists `dinky_sys_menu`; + create table `dinky_sys_menu` ( `id` bigint not null auto_increment comment ' id', `parent_id` bigint not null comment 'parent menu id', @@ -538,7 +538,7 @@ create table `dinky_sys_menu` ( -- ---------------------------- -- Table structure dinky_sys_role_menu -- ---------------------------- -drop table if exists `dinky_sys_role_menu`; + CREATE TABLE `dinky_sys_role_menu` ( `id` bigint NOT null AUTO_INCREMENT COMMENT 'id', `role_id` bigint NOT null COMMENT 'role id', @@ -554,7 +554,7 @@ CREATE TABLE `dinky_sys_role_menu` ( -- ---------------------------- -- Table structure dinky_sys_token -- ---------------------------- -drop table if exists `dinky_sys_token`; + CREATE TABLE `dinky_sys_token` ( `id` bigint NOT NULL AUTO_INCREMENT COMMENT 'id', `token_value` varchar(255) NOT NULL COMMENT 'token value', @@ -576,7 +576,7 @@ CREATE TABLE `dinky_sys_token` ( -- ---------------------------- -- Table structure dinky_sys_alert -- ---------------------------- -drop table if exists `dinky_alert_template`; + create table if not exists dinky_alert_template ( id int auto_increment @@ -588,7 +588,7 @@ create table if not exists dinky_alert_template update_time datetime null COMMENT 'update time' ); -drop table if exists `dinky_alert_rules`; + create table if not exists dinky_alert_rules ( id int auto_increment diff --git a/dinky-admin/src/main/resources/mapper/UDFManageMapper.xml b/dinky-admin/src/main/resources/mapper/UDFManageMapper.xml index 9ec2209aaf..3a5ddd1ad5 100644 --- a/dinky-admin/src/main/resources/mapper/UDFManageMapper.xml +++ b/dinky-admin/src/main/resources/mapper/UDFManageMapper.xml @@ -21,10 +21,10 @@ diff --git a/dinky-common/src/main/java/org/dinky/data/constant/DirConstant.java b/dinky-common/src/main/java/org/dinky/data/constant/DirConstant.java index d71f0cd972..c28011e21f 100644 --- a/dinky-common/src/main/java/org/dinky/data/constant/DirConstant.java +++ b/dinky-common/src/main/java/org/dinky/data/constant/DirConstant.java @@ -21,7 +21,7 @@ import java.io.File; -import cn.hutool.core.io.FileUtil; +import cn.hutool.system.SystemUtil; /** * DirConstant @@ -31,7 +31,7 @@ public class DirConstant { public static final String FILE_SEPARATOR = File.separator; - public static final String ROOT_PATH = 
FileUtil.getUserHomeDir().getAbsolutePath(); + public static final String ROOT_PATH = System.getProperty(SystemUtil.USER_DIR); public static final String LOG_DIR_PATH = ROOT_PATH + FILE_SEPARATOR + "logs"; public static final String ROOT_LOG_PATH = LOG_DIR_PATH + FILE_SEPARATOR + "dinky.log"; } diff --git a/dinky-executor/src/main/java/org/dinky/executor/VariableManager.java b/dinky-executor/src/main/java/org/dinky/executor/VariableManager.java index 1a9a8efff3..10f7abe70f 100644 --- a/dinky-executor/src/main/java/org/dinky/executor/VariableManager.java +++ b/dinky-executor/src/main/java/org/dinky/executor/VariableManager.java @@ -146,7 +146,7 @@ public Object getVariable(String variableName) { return variables.get(variableName); } // use jexl to parse variable value - return ENGINE.eval(variableName, ENGINE_CONTEXT); + return ENGINE.eval(variableName, ENGINE_CONTEXT, null); } catch (Exception e) { throw new CatalogException(format("The variable of sql %s does not exist.", variableName)); } diff --git a/pom.xml b/pom.xml index 85d63e24ef..4055f29150 100644 --- a/pom.xml +++ b/pom.xml @@ -68,7 +68,7 @@ 1.3 6.2.0.Final 4.5.13 - 5.8.11 + 5.8.22 2.14.1 2.1.6 1.9.3 @@ -108,7 +108,7 @@ 2.5.0 0.10.2 1.0.0-SNAPSHOT - 1.36.0 + 1.37.0 2.12 2.12.10 From 9e59996013e87c9465cd60d32f748bd499d30e57 Mon Sep 17 00:00:00 2001 From: ZackYoung Date: Thu, 9 Nov 2023 18:48:22 +0800 Subject: [PATCH 21/21] [Feature][token]Add token persistent (#2512) * feature_add_token_persistent * feature_add_token_persistent * feature_add_token_persistent --- dinky-admin/pom.xml | 5 - .../java/org/dinky/configure/CacheConfig.java | 63 ---- .../org/dinky/configure/SaTokenConfigure.java | 5 +- .../org/dinky/controller/TokenController.java | 10 +- .../java/org/dinky/data/model/SysToken.java | 14 + .../org/dinky/service/impl/TokenService.java | 329 ++++++++++++++++++ .../dinky/service/impl/UserServiceImpl.java | 34 +- .../src/main/resources/db/db-h2-ddl.sql | 1 + .../src/main/resources/mapper/TokenMapper.xml | 9 +- dinky-web/src/models/Sse.tsx | 25 +- pom.xml | 12 - script/sql/dinky-mysql.sql | 53 +-- script/sql/dinky-pg.sql | 4 +- .../1.0.0-SNAPSHOT_schema/mysql/dinky_ddl.sql | 31 +- 14 files changed, 449 insertions(+), 146 deletions(-) delete mode 100644 dinky-admin/src/main/java/org/dinky/configure/CacheConfig.java create mode 100644 dinky-admin/src/main/java/org/dinky/service/impl/TokenService.java diff --git a/dinky-admin/pom.xml b/dinky-admin/pom.xml index 7fd1b2f957..2332938453 100644 --- a/dinky-admin/pom.xml +++ b/dinky-admin/pom.xml @@ -169,11 +169,6 @@ org.springframework.boot spring-boot-starter-actuator - - - org.springframework.boot - spring-boot-starter-data-redis - org.dinky dinky-core diff --git a/dinky-admin/src/main/java/org/dinky/configure/CacheConfig.java b/dinky-admin/src/main/java/org/dinky/configure/CacheConfig.java deleted file mode 100644 index a0eb4b45a6..0000000000 --- a/dinky-admin/src/main/java/org/dinky/configure/CacheConfig.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package org.dinky.configure; - -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.data.redis.cache.RedisCacheConfiguration; -import org.springframework.data.redis.serializer.RedisSerializationContext; -import org.springframework.data.redis.serializer.RedisSerializer; -import org.springframework.data.redis.serializer.StringRedisSerializer; - -/** - * CacheCoonfigure - * - * @since 2022/09/24 11:23 - */ -@Configuration -@ConditionalOnProperty(prefix = "spring.cache", name = "type", havingValue = "REDIS") -public class CacheConfig { - - /** 配置Redis缓存注解的value序列化方式 */ - @Bean - public RedisCacheConfiguration cacheConfiguration() { - return RedisCacheConfiguration.defaultCacheConfig() - // 序列化为json - .serializeValuesWith(RedisSerializationContext.SerializationPair.fromSerializer(RedisSerializer.json())) - .serializeKeysWith( - RedisSerializationContext.SerializationPair.fromSerializer(new StringRedisSerializer())); - } - - // /** - // * 配置RedisTemplate的序列化方式 - // */ - // @Bean - // public RedisTemplate redisTemplate(RedisConnectionFactory factory) { - // RedisTemplate redisTemplate = new RedisTemplate(); - // redisTemplate.setConnectionFactory(factory); - // // 指定key的序列化方式:string - // redisTemplate.setKeySerializer(RedisSerializer.string()); - // // 指定value的序列化方式:json - // redisTemplate.setValueSerializer(RedisSerializer.json()); - // return redisTemplate; - // } - -} diff --git a/dinky-admin/src/main/java/org/dinky/configure/SaTokenConfigure.java b/dinky-admin/src/main/java/org/dinky/configure/SaTokenConfigure.java index ff1c155714..bf61ef83e9 100644 --- a/dinky-admin/src/main/java/org/dinky/configure/SaTokenConfigure.java +++ b/dinky-admin/src/main/java/org/dinky/configure/SaTokenConfigure.java @@ -22,14 +22,15 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import cn.dev33.satoken.jwt.StpLogicJwtForSimple; import cn.dev33.satoken.stp.StpLogic; +import cn.dev33.satoken.util.SaTokenConsts; @Configuration public class SaTokenConfigure { // Sa-Token 整合 jwt (Simple 简单模式) @Bean public StpLogic getStpLogicJwt() { - return new StpLogicJwtForSimple(); + // return new StpLogicJwtForSimple(); + return new StpLogic(SaTokenConsts.TOKEN_STYLE_RANDOM_32); } } diff --git a/dinky-admin/src/main/java/org/dinky/controller/TokenController.java b/dinky-admin/src/main/java/org/dinky/controller/TokenController.java index 93656e4266..432a7b9199 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/TokenController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/TokenController.java @@ -42,14 +42,16 @@ import cn.dev33.satoken.annotation.SaCheckPermission; import cn.dev33.satoken.annotation.SaMode; -import cn.hutool.core.lang.UUID; +import cn.dev33.satoken.stp.StpLogic; import io.swagger.annotations.Api; import io.swagger.annotations.ApiImplicitParam; import io.swagger.annotations.ApiOperation; import lombok.RequiredArgsConstructor; import 
lombok.extern.slf4j.Slf4j; -/** TokenController */ +/** + * TokenController + */ @Slf4j @Api(tags = "Token Controller") @RestController @@ -58,6 +60,7 @@ public class TokenController { private final TokenService tokenService; + private final StpLogic stpLogic; /** * get udf template list @@ -113,12 +116,13 @@ public Result deleteToken(@RequestParam Integer id) { /** * delete Token by id + * * @return {@link Result} <{@link Void}> */ @PostMapping("/buildToken") @Log(title = "Build Token", businessType = BusinessType.OTHER) @ApiOperation("Build Token") public Result buildToken() { - return Result.succeed(UUID.fastUUID().toString(true), Status.SUCCESS); + return Result.succeed(stpLogic.createTokenValue(null, null, 1, null), Status.SUCCESS); } } diff --git a/dinky-admin/src/main/java/org/dinky/data/model/SysToken.java b/dinky-admin/src/main/java/org/dinky/data/model/SysToken.java index bebb71527d..101d726ed1 100644 --- a/dinky-admin/src/main/java/org/dinky/data/model/SysToken.java +++ b/dinky-admin/src/main/java/org/dinky/data/model/SysToken.java @@ -24,6 +24,7 @@ import java.util.Date; import java.util.List; +import com.baomidou.mybatisplus.annotation.EnumValue; import com.baomidou.mybatisplus.annotation.FieldFill; import com.baomidou.mybatisplus.annotation.IdType; import com.baomidou.mybatisplus.annotation.TableField; @@ -39,6 +40,7 @@ import io.swagger.annotations.ApiModelProperty; import lombok.AllArgsConstructor; import lombok.Data; +import lombok.Getter; import lombok.NoArgsConstructor; @Data @@ -154,4 +156,16 @@ public class SysToken implements Serializable { dataType = "List", notes = "List of timestamps indicating the time range for token expiration") private List expireTimeRange; + + private Source source; + + @Getter + @AllArgsConstructor + public enum Source { + LOGIN(1), + CUSTOM(2); + + @EnumValue + private final int type; + } } diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/TokenService.java b/dinky-admin/src/main/java/org/dinky/service/impl/TokenService.java new file mode 100644 index 0000000000..381f8c14db --- /dev/null +++ b/dinky-admin/src/main/java/org/dinky/service/impl/TokenService.java @@ -0,0 +1,329 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.dinky.service.impl; + +import org.dinky.context.UserInfoContextHolder; +import org.dinky.data.dto.UserDTO; +import org.dinky.data.model.SysToken; +import org.dinky.data.model.User; +import org.dinky.mapper.TokenMapper; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +import org.springframework.boot.sql.init.dependency.DependsOnDatabaseInitialization; +import org.springframework.stereotype.Service; + +import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; + +import cn.dev33.satoken.SaManager; +import cn.dev33.satoken.dao.SaTokenDao; +import cn.dev33.satoken.stp.StpLogic; +import cn.dev33.satoken.util.SaFoxUtil; +import cn.hutool.core.collection.CollUtil; +import cn.hutool.core.date.DateTime; +import cn.hutool.core.date.DateUtil; +import cn.hutool.core.util.StrUtil; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +@RequiredArgsConstructor +@Slf4j +@Service +@DependsOnDatabaseInitialization +public class TokenService implements SaTokenDao { + + private final TokenMapper tokenMapper; + private final StpLogic stpLogic; + + /** + * 存储数据的集合 + */ + public Map dataMap = new ConcurrentHashMap<>(); + + /** + * 存储数据过期时间的集合(单位: 毫秒), 记录所有 key 的到期时间 (注意存储的是到期时间,不是剩余存活时间) + */ + public Map expireMap = new ConcurrentHashMap<>(); + + // ------------------------ String 读写操作 + + @Override + public String get(String key) { + clearKeyByTimeout(key); + return (String) dataMap.get(key); + } + + @Override + public void set(String key, String value, long timeout) { + if (timeout == 0 || timeout <= SaTokenDao.NOT_VALUE_EXPIRE) { + return; + } + dataMap.put(key, value); + expireMap.put( + key, + (timeout == SaTokenDao.NEVER_EXPIRE) + ? (SaTokenDao.NEVER_EXPIRE) + : (System.currentTimeMillis() + timeout * 1000)); + } + + @Override + public void update(String key, String value) { + if (getKeyTimeout(key) == SaTokenDao.NOT_VALUE_EXPIRE) { + return; + } + dataMap.put(key, value); + } + + @Override + public void delete(String key) { + String token = CollUtil.getLast(StrUtil.split(key, ":")); + dataMap.remove(key); + expireMap.remove(key); + tokenMapper.delete(new LambdaQueryWrapper().eq(SysToken::getTokenValue, token)); + } + + @Override + public long getTimeout(String key) { + return getKeyTimeout(key); + } + + @Override + public void updateTimeout(String key, long timeout) { + expireMap.put( + key, + (timeout == SaTokenDao.NEVER_EXPIRE) + ? (SaTokenDao.NEVER_EXPIRE) + : (System.currentTimeMillis() + timeout * 1000)); + } + + // ------------------------ Object 读写操作 + + @Override + public Object getObject(String key) { + clearKeyByTimeout(key); + return dataMap.get(key); + } + + @Override + public void setObject(String key, Object object, long timeout) { + if (timeout == 0 || timeout <= SaTokenDao.NOT_VALUE_EXPIRE) { + return; + } + dataMap.put(key, object); + expireMap.put( + key, + (timeout == SaTokenDao.NEVER_EXPIRE) + ? (SaTokenDao.NEVER_EXPIRE) + : (System.currentTimeMillis() + timeout * 1000)); + } + + @Override + public void updateObject(String key, Object object) { + if (getKeyTimeout(key) == SaTokenDao.NOT_VALUE_EXPIRE) { + return; + } + dataMap.put(key, object); + } + + @Override + public void deleteObject(String key) { + delete(key); + } + + @Override + public long getObjectTimeout(String key) { + return getKeyTimeout(key); + } + + @Override + public void updateObjectTimeout(String key, long timeout) { + expireMap.put( + key, + (timeout == SaTokenDao.NEVER_EXPIRE) + ? 
(SaTokenDao.NEVER_EXPIRE) + : (System.currentTimeMillis() + timeout * 1000)); + } + + // ------------------------ Session 读写操作 + // 使用接口默认实现 + + // --------- 会话管理 + + @Override + public List searchData(String prefix, String keyword, int start, int size, boolean sortType) { + return SaFoxUtil.searchList(expireMap.keySet(), prefix, keyword, start, size, sortType); + } + + // ------------------------ 以下是一个定时缓存的简单实现,采用:惰性检查 + 异步循环扫描 + + // --------- 过期时间相关操作 + + /** + * 如果指定的 key 已经过期,则立即清除它 + * + * @param key 指定 key + */ + void clearKeyByTimeout(String key) { + Long expirationTime = expireMap.get(key); + // 清除条件: + // 1、数据存在。 + // 2、不是 [ 永不过期 ]。 + // 3、已经超过过期时间。 + if (expirationTime != null + && expirationTime != SaTokenDao.NEVER_EXPIRE + && expirationTime < System.currentTimeMillis()) { + delete(key); + } + } + + /** + * 获取指定 key 的剩余存活时间 (单位:秒) + * + * @param key 指定 key + * @return 这个 key 的剩余存活时间 + */ + long getKeyTimeout(String key) { + // 由于数据过期检测属于惰性扫描,很可能此时这个 key 已经是过期状态了,所以这里需要先检查一下 + clearKeyByTimeout(key); + + // 获取这个 key 的过期时间 + Long expire = expireMap.get(key); + + // 如果 expire 数据不存在,说明框架没有存储这个 key,此时返回 NOT_VALUE_EXPIRE + if (expire == null) { + return SaTokenDao.NOT_VALUE_EXPIRE; + } + + // 如果 expire 被标注为永不过期,则返回 NEVER_EXPIRE + if (expire == SaTokenDao.NEVER_EXPIRE) { + return SaTokenDao.NEVER_EXPIRE; + } + + // ---- 代码至此,说明这个 key 是有过期时间的,且未过期,那么: + + // 计算剩余时间并返回 (过期时间戳 - 当前时间戳) / 1000 转秒 + long timeout = (expire - System.currentTimeMillis()) / 1000; + + // 小于零时,视为不存在 + if (timeout < 0) { + dataMap.remove(key); + expireMap.remove(key); + return SaTokenDao.NOT_VALUE_EXPIRE; + } + return timeout; + } + + // --------- 定时清理过期数据 + + /** + * 执行数据清理的线程引用 + */ + public Thread refreshThread; + + /** + * 是否继续执行数据清理的线程标记 + */ + public volatile boolean refreshFlag; + + /** + * 清理所有已经过期的 key + */ + public void refreshDataMap() { + for (String s : expireMap.keySet()) { + clearKeyByTimeout(s); + } + } + + /** + * 初始化定时任务,定时清理过期数据 + */ + public void initRefreshThread() { + + // 如果开发者配置了 <=0 的值,则不启动定时清理 + if (SaManager.getConfig().getDataRefreshPeriod() <= 0) { + return; + } + + // 启动定时刷新 + this.refreshFlag = true; + this.refreshThread = new Thread(() -> { + for (; ; ) { + try { + try { + // 如果已经被标记为结束 + if (!refreshFlag) { + return; + } + // 执行清理 + refreshDataMap(); + } catch (Exception e) { + log.error("", e); + } + // 休眠N秒 + int dataRefreshPeriod = SaManager.getConfig().getDataRefreshPeriod(); + if (dataRefreshPeriod <= 0) { + dataRefreshPeriod = 1; + } + Thread.sleep(dataRefreshPeriod * 1000L); + } catch (Exception e) { + log.error("", e); + } + } + }); + this.refreshThread.start(); + } + + /** + * 组件被安装时,开始刷新数据线程 + */ + @Override + public void init() { + DateTime now = DateUtil.date(); + List sysTokens = tokenMapper.selectList(new LambdaQueryWrapper<>()); + for (SysToken sysToken : sysTokens) { + Integer userId = sysToken.getUserId(); + dataMap.put(stpLogic.splicingKeyTokenValue(sysToken.getTokenValue()), userId.toString()); + dataMap.put(stpLogic.splicingKeyLastActiveTime(sysToken.getTokenValue()), StrUtil.toString(now.getTime())); + UserDTO userInfo = new UserDTO(); + User user = new User(); + user.setId(userId); + userInfo.setUser(user); + UserInfoContextHolder.set(userId, userInfo); + if (sysToken.getExpireType() == 1) { + expireMap.put(stpLogic.splicingKeyTokenValue(sysToken.getTokenValue()), NEVER_EXPIRE); + } else { + expireMap.put( + stpLogic.splicingKeyTokenValue(sysToken.getTokenValue()), + sysToken.getExpireEndTime().getTime()); + } + } + initRefreshThread(); + } + + /** + * 
组件被卸载时,结束定时任务,不再定时清理过期数据 + */ + @Override + public void destroy() { + this.refreshFlag = false; + } +} diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/UserServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/UserServiceImpl.java index 5d8f3d49d8..1b930846e3 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/UserServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/UserServiceImpl.java @@ -33,6 +33,7 @@ import org.dinky.data.model.Role; import org.dinky.data.model.RoleMenu; import org.dinky.data.model.RowPermissions; +import org.dinky.data.model.SysToken; import org.dinky.data.model.SystemConfiguration; import org.dinky.data.model.Tenant; import org.dinky.data.model.User; @@ -42,6 +43,7 @@ import org.dinky.data.params.AssignUserToTenantParams; import org.dinky.data.result.Result; import org.dinky.data.vo.UserVo; +import org.dinky.mapper.TokenMapper; import org.dinky.mapper.UserMapper; import org.dinky.mybatis.service.impl.SuperServiceImpl; import org.dinky.service.MenuService; @@ -66,6 +68,8 @@ import cn.dev33.satoken.secure.SaSecureUtil; import cn.dev33.satoken.stp.StpUtil; +import cn.hutool.core.date.DateTime; +import cn.hutool.core.date.DateUtil; import cn.hutool.core.util.RandomUtil; import cn.hutool.core.util.StrUtil; import lombok.RequiredArgsConstructor; @@ -98,6 +102,8 @@ public class UserServiceImpl extends SuperServiceImpl implemen private final RoleMenuService roleMenuService; private final MenuService menuService; + private final TokenService tokenService; + private final TokenMapper tokenMapper; @Override public Result registerUser(User user) { @@ -156,7 +162,7 @@ public Boolean removeUser(Integer id) { * * @param loginDTO a user based on the provided login credentials. * @return a Result object containing the user information if the login is successful, or an - * appropriate error status if the login fails. + * appropriate error status if the login fails. 
*/ @Override public Result loginUser(LoginDTO loginDTO) { @@ -182,15 +188,37 @@ public Result loginUser(LoginDTO loginDTO) { } // Perform login using StpUtil (Assuming it handles the session management) - StpUtil.login(user.getId(), loginDTO.isAutoLogin()); + Integer userId = user.getId(); + StpUtil.login(userId, loginDTO.isAutoLogin()); // save login log record loginLogService.saveLoginLog(user, Status.LOGIN_SUCCESS); + insertToken(userInfo); + // Return the user information along with a success status return Result.succeed(userInfo, Status.LOGIN_SUCCESS); } + private void insertToken(UserDTO userInfo) { + Integer userId = userInfo.getUser().getId(); + SysToken sysToken = new SysToken(); + String tokenValue = StpUtil.getTokenValueByLoginId(userId); + sysToken.setTokenValue(tokenValue); + sysToken.setUserId(userId); + // todo 权限和租户暂未接入 + sysToken.setRoleId(1); + sysToken.setTenantId(1); + sysToken.setExpireType(3); + DateTime date = DateUtil.date(); + sysToken.setExpireStartTime(date); + sysToken.setExpireEndTime(DateUtil.offsetDay(date, 1)); + sysToken.setCreator(userId); + sysToken.setUpdator(userId); + sysToken.setSource(SysToken.Source.LOGIN); + tokenMapper.insert(sysToken); + } + private User localLogin(LoginDTO loginDTO) throws AuthException { // Get user from local database by username User user = getUserByUsername(loginDTO.getUsername()); @@ -476,7 +504,7 @@ public Result resetPassword(Integer userId) { * @param userId * @return */ - private UserDTO buildUserInfo(Integer userId) { + public UserDTO buildUserInfo(Integer userId) { User user = getById(userId); if (Asserts.isNull(user)) { diff --git a/dinky-admin/src/main/resources/db/db-h2-ddl.sql b/dinky-admin/src/main/resources/db/db-h2-ddl.sql index 698e2ecbb1..708a716e4f 100644 --- a/dinky-admin/src/main/resources/db/db-h2-ddl.sql +++ b/dinky-admin/src/main/resources/db/db-h2-ddl.sql @@ -568,6 +568,7 @@ CREATE TABLE `dinky_sys_token` ( `update_time` datetime NOT NULL COMMENT 'modify time', `creator` bigint DEFAULT NULL COMMENT '创建人', `updator` bigint DEFAULT NULL COMMENT '修改人', + `source` tinyint(2) DEFAULT NULL COMMENT '1:login 2:custom', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8mb4 COMMENT='token management'; diff --git a/dinky-admin/src/main/resources/mapper/TokenMapper.xml b/dinky-admin/src/main/resources/mapper/TokenMapper.xml index 0b9ee212a6..4eedfee080 100644 --- a/dinky-admin/src/main/resources/mapper/TokenMapper.xml +++ b/dinky-admin/src/main/resources/mapper/TokenMapper.xml @@ -4,10 +4,10 @@ diff --git a/dinky-web/src/models/Sse.tsx b/dinky-web/src/models/Sse.tsx index e15d26823e..8acc603a83 100644 --- a/dinky-web/src/models/Sse.tsx +++ b/dinky-web/src/models/Sse.tsx @@ -1,19 +1,19 @@ /* * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. * */ @@ -41,7 +41,6 @@ export default () => { const para = { sessionKey: uuidRef.current, topics: topics }; await postAll('api/sse/subscribeTopic', para).catch((e) => ErrorMessage(e)); }; - const reconnectSse = () => { const sseUrl = '/api/sse/connect?sessionKey=' + uuidRef.current; eventSource?.close(); diff --git a/pom.xml b/pom.xml index 4055f29150..87d7cb9601 100644 --- a/pom.xml +++ b/pom.xml @@ -372,18 +372,6 @@ sa-token-spring-boot-starter ${sa-token.version} - - - cn.dev33 - sa-token-dao-redis-jackson - ${sa-token.version} - - - redis.clients - jedis - ${jedis.version} - - org.dinky dinky-core diff --git a/script/sql/dinky-mysql.sql b/script/sql/dinky-mysql.sql index 581b430c61..11bd95571c 100644 --- a/script/sql/dinky-mysql.sql +++ b/script/sql/dinky-mysql.sql @@ -1841,29 +1841,6 @@ CREATE TABLE `dinky_sys_role_menu` ( UNIQUE KEY `un_role_menu_inx` (`role_id`,`menu_id`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; - --- ---------------------------- --- Table structure dinky_sys_token --- ---------------------------- -drop table if exists `dinky_sys_token`; -CREATE TABLE `dinky_sys_token` ( - `id` bigint NOT NULL AUTO_INCREMENT COMMENT 'id', - `token_value` varchar(255) COLLATE utf8mb4_general_ci NOT NULL COMMENT 'token value', - `user_id` bigint NOT NULL COMMENT 'user id', - `role_id` bigint NOT NULL COMMENT 'role id', - `tenant_id` bigint NOT NULL COMMENT 'tenant id', - `expire_type` tinyint NOT NULL COMMENT '1: never expire, 2: expire after a period of time, 3: expire at a certain time', - `expire_start_time` datetime DEFAULT NULL COMMENT 'expire start time ,when expire_type = 3 , it is the start time of the period', - `expire_end_time` datetime DEFAULT NULL COMMENT 'expire end time ,when expire_type = 2,3 , it is the end time of the period', - `create_time` datetime NOT NULL COMMENT 'create time', - `update_time` datetime NOT NULL COMMENT 'modify time', - `creator` bigint DEFAULT NULL COMMENT '创建人', - `updator` bigint DEFAULT NULL COMMENT '修改人', - PRIMARY KEY (`id`) -) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci COMMENT='token management'; - - - -- ---------------------------- -- Table structure dinky_sys_alert -- ---------------------------- @@ -1918,9 +1895,7 @@ INSERT INTO dinky_alert_template VALUES (1, 'Default', ' [Go toTask Web](http://${taskUrl}) ', 1, null, null); -COMMIT; -SET FOREIGN_KEY_CHECKS = 1; CREATE TABLE `dinky_udf_manage` ( `id` int(11) NOT NULL AUTO_INCREMENT, `name` varchar(50) DEFAULT NULL COMMENT 'udf name', @@ -1932,4 +1907,30 @@ CREATE TABLE `dinky_udf_manage` ( `update_time` datetime DEFAULT NULL COMMENT 'update time', PRIMARY KEY (`id`) USING BTREE, KEY `name,resources_id` 
(`name`,`resources_id`) USING BTREE -) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 ROW_FORMAT=DYNAMIC COMMENT='udf'; \ No newline at end of file +) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 ROW_FORMAT=DYNAMIC COMMENT='udf'; + +-- ---------------------------- +-- Table structure dinky_sys_token +-- ---------------------------- +drop table if exists `dinky_sys_token`; +CREATE TABLE `dinky_sys_token` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'id', + `token_value` varchar(255) NOT NULL COMMENT 'token value', + `user_id` bigint(20) NOT NULL COMMENT 'user id', + `role_id` bigint(20) NOT NULL COMMENT 'role id', + `tenant_id` bigint(20) NOT NULL COMMENT 'tenant id', + `expire_type` tinyint(4) NOT NULL COMMENT '1: never expire, 2: expire after a period of time, 3: expire at a certain time', + `expire_start_time` datetime DEFAULT NULL COMMENT 'expire start time ,when expire_type = 3 , it is the start time of the period', + `expire_end_time` datetime DEFAULT NULL COMMENT 'expire end time ,when expire_type = 2,3 , it is the end time of the period', + `create_time` datetime NOT NULL COMMENT 'create time', + `update_time` datetime NOT NULL COMMENT 'modify time', + `creator` bigint(20) DEFAULT NULL COMMENT '创建人', + `updator` bigint(20) DEFAULT NULL COMMENT '修改人', + `source` tinyint(2) DEFAULT NULL COMMENT '1:login 2:custom', + PRIMARY KEY (`id`), + UNIQUE KEY `token_value` (`token_value`) USING BTREE, + KEY `source` (`source`) USING HASH +) ENGINE=InnoDB AUTO_INCREMENT=22 DEFAULT CHARSET=utf8mb4 COMMENT='token management'; + +COMMIT; +SET FOREIGN_KEY_CHECKS = 1; diff --git a/script/sql/dinky-pg.sql b/script/sql/dinky-pg.sql index 7d7bd15134..c17d2b8db7 100644 --- a/script/sql/dinky-pg.sql +++ b/script/sql/dinky-pg.sql @@ -2663,7 +2663,8 @@ create table public.dinky_sys_token ( create_time timestamp without time zone, update_time timestamp without time zone, creator bigint, - updator bigint + updator bigint, + source bigint ); comment on table public.dinky_sys_token is 'token table'; comment on column public.dinky_sys_token.id is 'id'; @@ -2678,6 +2679,7 @@ comment on column public.dinky_sys_token.create_time is 'create time'; comment on column public.dinky_sys_token.update_time is 'modify time'; comment on column public.dinky_sys_token.creator is 'creat user'; comment on column public.dinky_sys_token.updator is 'modify user'; +comment on column public.dinky_sys_token.source is 'source'; -- ---------------------------- diff --git a/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_ddl.sql b/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_ddl.sql index f9f90b0ade..076665bdb2 100644 --- a/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_ddl.sql +++ b/script/sql/upgrade/1.0.0-SNAPSHOT_schema/mysql/dinky_ddl.sql @@ -275,20 +275,23 @@ create table if not exists dinky_alert_rules -- Table structure dinky_sys_token -- ---------------------------- CREATE TABLE `dinky_sys_token` ( - `id` bigint NOT NULL AUTO_INCREMENT COMMENT 'id', - `token_value` varchar(255) COLLATE utf8mb4_general_ci NOT NULL COMMENT 'token value', - `user_id` bigint NOT NULL COMMENT 'user id', - `role_id` bigint NOT NULL COMMENT 'role id', - `tenant_id` bigint NOT NULL COMMENT 'tenant id', - `expire_type` tinyint NOT NULL COMMENT '1: never expire, 2: expire after a period of time, 3: expire at a certain time', - `expire_start_time` datetime DEFAULT NULL COMMENT 'expire start time ,when expire_type = 3 , it is the start time of the period', - `expire_end_time` datetime DEFAULT NULL COMMENT 'expire 
end time ,when expire_type = 2,3 , it is the end time of the period', - `create_time` datetime NOT NULL COMMENT 'create time', - `update_time` datetime NOT NULL COMMENT 'modify time', - `creator` bigint DEFAULT NULL COMMENT '创建人', - `updator` bigint DEFAULT NULL COMMENT '修改人', - PRIMARY KEY (`id`) -) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci COMMENT='token management'; + `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'id', + `token_value` varchar(255) NOT NULL COMMENT 'token value', + `user_id` bigint(20) NOT NULL COMMENT 'user id', + `role_id` bigint(20) NOT NULL COMMENT 'role id', + `tenant_id` bigint(20) NOT NULL COMMENT 'tenant id', + `expire_type` tinyint(4) NOT NULL COMMENT '1: never expire, 2: expire after a period of time, 3: expire at a certain time', + `expire_start_time` datetime DEFAULT NULL COMMENT 'expire start time ,when expire_type = 3 , it is the start time of the period', + `expire_end_time` datetime DEFAULT NULL COMMENT 'expire end time ,when expire_type = 2,3 , it is the end time of the period', + `create_time` datetime NOT NULL COMMENT 'create time', + `update_time` datetime NOT NULL COMMENT 'modify time', + `creator` bigint(20) DEFAULT NULL COMMENT '创建人', + `updator` bigint(20) DEFAULT NULL COMMENT '修改人', + `source` tinyint(2) DEFAULT NULL COMMENT '1:login 2:custom', + PRIMARY KEY (`id`), + UNIQUE KEY `token_value` (`token_value`) USING BTREE, + KEY `source` (`source`) USING HASH +) ENGINE=InnoDB AUTO_INCREMENT=22 DEFAULT CHARSET=utf8mb4 COMMENT='token management'; CREATE TABLE `dinky_udf_manage` ( `id` int(11) NOT NULL AUTO_INCREMENT,