Merge branch 'DataLinkDC:dev' into dev
gaoyan1998 authored Sep 19, 2024
2 parents 45c53d9 + 2387318 commit 5f63dd8
Showing 231 changed files with 11,169 additions and 1,006 deletions.
24 changes: 23 additions & 1 deletion .github/workflows/auto-realease-ci.yml
@@ -108,6 +108,28 @@ jobs:
-Pprod,flink-single-version,flink-1.18,maven-central,web,fast \
--no-snapshot-updates
- name: "Build Dinky ${{ inputs.version }} with Fink 1.19 Pre Release/Release Artifact"
id: build_dinky1_19
run: |
./mvnw -B clean package \
-Dmaven.test.skip=false \
-Dspotless.check.skip=true \
-Denforcer.skip=false \
-Dmaven.javadoc.skip=true \
-Pprod,flink-single-version,flink-1.19,maven-central,web,fast \
--no-snapshot-updates
- name: "Build Dinky ${{ inputs.version }} with Fink 1.20 Pre Release/Release Artifact"
id: build_dinky1_20
run: |
./mvnw -B clean package \
-Dmaven.test.skip=false \
-Dspotless.check.skip=true \
-Denforcer.skip=false \
-Dmaven.javadoc.skip=true \
-Pprod,flink-single-version,flink-1.20,maven-central,web,fast \
--no-snapshot-updates
- name: Automatic Upload Releases to GitHub Releases Page
uses: "marvinpinto/action-automatic-releases@latest"
with:
@@ -120,4 +142,4 @@ jobs:
body: |
${{ inputs.release_notes }}
files: |
./build/dinky-release-*.tar.gz
./build/dinky-release-*.tar.gz
2 changes: 1 addition & 1 deletion .github/workflows/backend.yaml
@@ -89,7 +89,7 @@ jobs:
fail-fast: true
matrix:
jdk: [8, 11]
flink: [1.14, 1.15, 1.16, 1.17, 1.18, 1.19]
flink: [1.14, 1.15, 1.16, 1.17, 1.18, 1.19, 1.20]

timeout-minutes: 30
env:
4 changes: 2 additions & 2 deletions .github/workflows/docs.yml
@@ -27,7 +27,7 @@ jobs:
- uses: actions/checkout@v3
- name: Build documentation
run: |
cd docs && npm install --prefer-offline --no-audit --progress=false --legacy-peer-deps --registry https://repo.huaweicloud.com/repository/npm/ \
cd docs && npm install --prefer-offline --no-audit --progress=false --legacy-peer-deps \
&& npm run build
- name: Upload documentation
uses: burnett01/[email protected]
@@ -38,4 +38,4 @@ jobs:
remote_host: ${{ secrets.RSYNC_HOST }}
remote_port: ${{ secrets.RSYNC_PORT }}
remote_user: ${{ secrets.RSYNC_USER }}
remote_key: ${{ secrets.RSYNC_KEY }}
remote_key: ${{ secrets.RSYNC_KEY }}
2 changes: 2 additions & 0 deletions .idea/vcs.xml

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion README.md
@@ -67,7 +67,7 @@ Thank you to all the people who already contributed to Dinky!

## How to Deploy

See [source code compilation](https://github.com/DataLinkDC/dinky/blob/dev/docs/docs/deploy_guide/compile.mdx) and [installation and deployment](https://github.com/DataLinkDC/dinky/blob/dev/docs/docs/deploy_guide/deploy.mdx) for details.
See [source code compilation](https://github.com/DataLinkDC/dinky/blob/dev/docs/docs/deploy_guide/compile.mdx) and [installation and deployment](https://github.com/DataLinkDC/dinky/blob/dev/docs/docs/deploy_guide/normal_deploy.mdx) for details.

## Thanks

2 changes: 1 addition & 1 deletion README_zh_CN.md
@@ -70,7 +70,7 @@ Dinky is a real-time data development platform based on `Apache Flink`, delivering agile…

## How to Deploy

See [source code compilation](https://github.com/DataLinkDC/dinky/blob/dev/docs/docs/deploy_guide/compile.mdx) and [installation and deployment](https://github.com/DataLinkDC/dinky/blob/dev/docs/docs/deploy_guide/deploy.mdx) for details.
See [source code compilation](https://github.com/DataLinkDC/dinky/blob/dev/docs/docs/deploy_guide/compile.mdx) and [installation and deployment](https://github.com/DataLinkDC/dinky/blob/dev/docs/docs/deploy_guide/normal_deploy.mdx) for details.

## Thanks

4 changes: 0 additions & 4 deletions dinky-admin/pom.xml
@@ -355,10 +355,6 @@
<groupId>org.flywaydb</groupId>
<artifactId>flyway-core</artifactId>
</dependency>
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-mysql</artifactId>
</dependency>
<dependency>
<groupId>org.dinky</groupId>
<artifactId>dinky-alert-dingtalk</artifactId>
7 changes: 7 additions & 0 deletions dinky-admin/src/main/java/org/dinky/Dinky.java
@@ -25,6 +25,8 @@
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.transaction.annotation.EnableTransactionManagement;

import com.alibaba.druid.proxy.DruidDriver;

import lombok.SneakyThrows;

/**
@@ -43,6 +45,11 @@ public class Dinky {

@SneakyThrows
public static void main(String[] args) {
// Initialize the JDBC driver ahead of time: driver initialization scans a very large
// number of packages, so it is started asynchronously on a separate thread.
new Thread(DruidDriver::getInstance).start();

SpringApplication app = new SpringApplication(Dinky.class);
app.run(args);
}
16 changes: 8 additions & 8 deletions dinky-admin/src/main/java/org/dinky/aop/LogAspect.java
@@ -27,6 +27,7 @@
import org.dinky.data.result.Result;
import org.dinky.service.impl.OperateLogServiceImpl;
import org.dinky.utils.IpUtils;
import org.dinky.utils.JsonUtils;
import org.dinky.utils.ServletUtils;

import org.apache.commons.lang3.StringUtils;
@@ -52,9 +53,10 @@
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.servlet.HandlerMapping;

import com.fasterxml.jackson.core.type.TypeReference;

import cn.dev33.satoken.stp.StpUtil;
import cn.hutool.extra.spring.SpringUtil;
import cn.hutool.json.JSONUtil;
import lombok.extern.slf4j.Slf4j;

/** Operation log recording handler */
@@ -100,14 +102,14 @@ protected void handleCommonLogic(final JoinPoint joinPoint, final Exception e, O

// *======== Database log ========*//
OperateLog operLog = new OperateLog();
Result result = JSONUtil.toBean(JSONUtil.parseObj(jsonResult), Result.class);
Result<Void> result = JsonUtils.toBean(jsonResult, new TypeReference<Result<Void>>() {});
operLog.setStatus(result.isSuccess() ? BusinessStatus.SUCCESS.ordinal() : BusinessStatus.FAIL.ordinal());

// Request IP address
String ip = IpUtils.getIpAddr(ServletUtils.getRequest());
operLog.setOperateIp(ip);
// Return value
operLog.setJsonResult(JSONUtil.toJsonStr(jsonResult));
operLog.setJsonResult(JsonUtils.toJsonString(jsonResult));

operLog.setOperateUrl(ServletUtils.getRequest().getRequestURI());
if (user != null) {
@@ -145,9 +147,8 @@ protected void handleCommonLogic(final JoinPoint joinPoint, final Exception e, O
*
* @param log log annotation
* @param operLog operation log
* @throws Exception
*/
public void getControllerMethodDescription(JoinPoint joinPoint, Log log, OperateLog operLog) throws Exception {
public void getControllerMethodDescription(JoinPoint joinPoint, Log log, OperateLog operLog) {
// Set the action type
operLog.setBusinessType(log.businessType().ordinal());
// Set the title
@@ -163,7 +164,6 @@ public void getControllerMethodDescription(JoinPoint joinPoint, Log log, Operate
* Get the request parameters and put them into the log
*
* @param operLog operation log
* @throws Exception exception
*/
private void setRequestValue(JoinPoint joinPoint, OperateLog operLog) {
String requestMethod = operLog.getRequestMethod();
@@ -179,7 +179,7 @@ private void setRequestValue(JoinPoint joinPoint, OperateLog operLog) {
}

/** Check whether the annotation exists; if it does, retrieve it */
private Log getAnnotationLog(JoinPoint joinPoint) throws Exception {
private Log getAnnotationLog(JoinPoint joinPoint) {
Signature signature = joinPoint.getSignature();
MethodSignature methodSignature = (MethodSignature) signature;
Method method = methodSignature.getMethod();
@@ -194,7 +194,7 @@ private Log getAnnotationLog(JoinPoint joinPoint) throws Exception {
private String argsArrayToString(Object[] paramsArray) {
return Arrays.stream(paramsArray)
.filter(o -> !isFilterObject(o))
.map(JSONUtil::toJsonStr)
.map(JsonUtils::toJsonString)
.collect(Collectors.joining(" "));
}

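This diff replaces hutool's JSONUtil with an in-house org.dinky.utils.JsonUtils whose implementation is not shown in this commit view. A minimal Jackson-based sketch that satisfies the call sites visible in the diff (toBean with a TypeReference, toBean with a Class as used in DownloadController below, and toJsonString) might look like the following; the real utility in the repository may differ.

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;

// Hypothetical sketch of a JsonUtils facade matching the call sites in this diff;
// the actual org.dinky.utils.JsonUtils may be implemented differently.
public final class JsonUtils {

    private static final ObjectMapper MAPPER = new ObjectMapper()
            // Tolerate response fields the target type does not declare
            .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

    private JsonUtils() {}

    // Used as JsonUtils.toBean(jsonResult, new TypeReference<Result<Void>>() {})
    public static <T> T toBean(Object obj, TypeReference<T> type) {
        try {
            if (obj instanceof String) {
                return MAPPER.readValue((String) obj, type);
            }
            return MAPPER.convertValue(obj, type);
        } catch (Exception e) {
            throw new IllegalStateException("JSON deserialization failed", e);
        }
    }

    // Used as JsonUtils.toBean(FileUtil.readUtf8String(depManifestFile), FlinkUdfManifest.class)
    public static <T> T toBean(String json, Class<T> clazz) {
        try {
            return MAPPER.readValue(json, clazz);
        } catch (Exception e) {
            throw new IllegalStateException("JSON deserialization failed", e);
        }
    }

    // Used as JsonUtils.toJsonString(jsonResult) and as the method reference JsonUtils::toJsonString
    public static String toJsonString(Object obj) {
        try {
            return MAPPER.writeValueAsString(obj);
        } catch (Exception e) {
            throw new IllegalStateException("JSON serialization failed", e);
        }
    }
}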
@@ -61,6 +61,10 @@ public class DatabaseFlywayMigration {
private final FlywayProperties flywayProperties;
private final DataSourceProperties dataSourceProperties;

/**
 * A HikariDataSource is used here because Dinky's Druid connection pool can intercept
 * some Flyway SQL during execution, causing the automatic upgrade to fail.
 * @return dataSource
 */
private DataSource dataSource() {
HikariDataSource hikariDataSource = new HikariDataSource();
hikariDataSource.setJdbcUrl(dataSourceProperties.getUrl());
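The hunk above shows only the start of dataSource(). As a rough sketch of the overall pattern — a dedicated Hikari pool, built from the same connection properties, handed to Flyway so migrations bypass Druid — the wiring might look like the code below. The method shape, the baselineOnMigrate flag, and the pool sizing are assumptions, not code from this commit.

import com.zaxxer.hikari.HikariDataSource;
import org.flywaydb.core.Flyway;

// Hypothetical sketch: run Flyway migrations on a dedicated Hikari pool so that
// Druid (the application's pool) cannot intercept and break migration SQL.
public class FlywayMigrationSketch {

    public static void migrate(String url, String user, String password) {
        HikariDataSource ds = new HikariDataSource();
        ds.setJdbcUrl(url);          // same JDBC URL the application uses
        ds.setUsername(user);
        ds.setPassword(password);
        ds.setMaximumPoolSize(2);    // migrations need very few connections

        try {
            Flyway.configure()
                    .dataSource(ds)
                    .baselineOnMigrate(true) // assumption: tolerate pre-existing schemas
                    .load()
                    .migrate();
        } finally {
            ds.close();              // release the migration-only pool afterwards
        }
    }
}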
@@ -20,19 +20,19 @@
package org.dinky.context;

import org.dinky.function.constant.PathConstant;
import org.dinky.utils.JsonUtils;

import java.util.ArrayList;
import java.util.List;

import cn.hutool.core.io.FileUtil;
import cn.hutool.json.JSONUtil;

public class GitBuildContextHolder {
private static final List<Integer> RUNNING_LIST = new ArrayList<>();

public static void addRun(Integer id) {
RUNNING_LIST.add(id);
FileUtil.writeUtf8String(JSONUtil.toJsonStr(getAll()), PathConstant.TMP_PATH + "/build.list");
FileUtil.writeUtf8String(JsonUtils.toJsonString(getAll()), PathConstant.TMP_PATH + "/build.list");
}

public static void remove(Integer id) {
@@ -26,18 +26,23 @@
import org.dinky.data.constant.DirConstant;
import org.dinky.data.dto.CatalogueTaskDTO;
import org.dinky.data.dto.CatalogueTreeQueryDTO;
import org.dinky.data.dto.ImportCatalogueDTO;
import org.dinky.data.enums.BusinessType;
import org.dinky.data.enums.Status;
import org.dinky.data.model.Catalogue;
import org.dinky.data.result.Result;
import org.dinky.data.vo.ExportCatalogueVO;
import org.dinky.data.vo.TreeVo;
import org.dinky.service.TaskService;
import org.dinky.service.catalogue.CatalogueService;

import java.io.File;
import java.util.List;

import org.springframework.http.HttpHeaders;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
@@ -46,6 +51,7 @@
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.multipart.MultipartHttpServletRequest;

import cn.dev33.satoken.annotation.SaCheckLogin;
import cn.hutool.core.io.FileUtil;
@@ -250,4 +256,38 @@ public Result<Void> copyTask(@RequestBody Catalogue catalogue) {
public Result<Void> deleteCatalogueById(@CatalogueId @RequestParam Integer id) {
return catalogueService.deleteCatalogueById(id);
}

/**
* export catalogue by id
*
* @param id catalogue id
* @return {@link ResponseEntity}
*/
@GetMapping("/export")
@Log(title = "Export Catalogue", businessType = BusinessType.EXPORT)
@ApiOperation("Export Catalogue")
@ApiImplicitParam(name = "id", value = "id", required = true, dataType = "Integer", dataTypeClass = Integer.class)
public ResponseEntity<?> exportCatalogue(@RequestParam Integer id) {
ExportCatalogueVO exportCatalogueVo = catalogueService.exportCatalogue(id);
// convert the return value to a file download at the controller level
HttpHeaders headers = new HttpHeaders();
headers.add(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=" + exportCatalogueVo.getFileName());
headers.add(HttpHeaders.CONTENT_TYPE, "application/json");
return ResponseEntity.ok().headers(headers).body(exportCatalogueVo.getDataJson());
}

/**
* import catalogue by parent id
*
* @return {@link Result}<{@link Void}>
*/
@PostMapping("/import")
@Log(title = "Import Catalogue", businessType = BusinessType.IMPORT)
@ApiOperation("Import Catalogue")
public Result<Void> importCatalogue(MultipartHttpServletRequest request) {
// assemble the DTO object and shield the service layer from the raw request
ImportCatalogueDTO importCatalogueDto = ImportCatalogueDTO.build(request);
catalogueService.importCatalogue(importCatalogueDto);
return Result.succeed();
}
}
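For a sense of how the new endpoints are consumed: export is a plain GET returning the catalogue JSON with a Content-Disposition header, and import is a multipart POST carrying the exported file back. A minimal client sketch for the export side, using Java 11's HttpClient, is below; the host/port, the "/api/catalogue" prefix, and the absence of auth headers are assumptions, since the controller's base @RequestMapping is not visible in this hunk.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.file.Files;
import java.nio.file.Path;

// Hypothetical client sketch for the new export endpoint.
public class CatalogueExportClient {

    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8888/api/catalogue/export?id=1"))
                .GET()
                .build();

        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        // The server sets Content-Disposition: attachment; filename=<exported file name>
        String disposition = response.headers()
                .firstValue("Content-Disposition")
                .orElse("attachment; filename=catalogue.json");
        String fileName = disposition.substring(disposition.indexOf("filename=") + "filename=".length());

        Files.writeString(Path.of(fileName), response.body());
        System.out.println("Exported catalogue written to " + fileName);
    }
}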
@@ -23,11 +23,16 @@
import org.dinky.data.constant.DirConstant;
import org.dinky.data.exception.BusException;
import org.dinky.data.model.FlinkUdfManifest;
import org.dinky.data.model.ResourcesModelEnum;
import org.dinky.data.model.SystemConfiguration;
import org.dinky.data.result.Result;
import org.dinky.function.constant.PathConstant;
import org.dinky.function.util.ZipWriter;
import org.dinky.resource.BaseResourceManager;
import org.dinky.utils.JsonUtils;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.List;
@@ -37,15 +42,18 @@

import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;

import cn.dev33.satoken.annotation.SaIgnore;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.convert.Convert;
import cn.hutool.core.io.FileUtil;
import cn.hutool.core.util.ArrayUtil;
import cn.hutool.extra.servlet.ServletUtil;
import cn.hutool.json.JSONUtil;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
@@ -58,7 +66,7 @@
@Api(tags = "UDF & App Jar Controller")
@RequestMapping("/download")
public class DownloadController {

// todo: Controller has injection risk
@GetMapping("downloadDepJar/{taskId}")
@ApiOperation("Download UDF Jar")
public void downloadJavaUDF(@PathVariable Integer taskId, HttpServletResponse resp) {
@@ -71,7 +79,7 @@ public void downloadJavaUDF(@PathVariable Integer taskId, HttpServletResponse re
return;
}
FlinkUdfManifest flinkUdfManifest =
JSONUtil.toBean(FileUtil.readUtf8String(depManifestFile), FlinkUdfManifest.class);
JsonUtils.toBean(FileUtil.readUtf8String(depManifestFile), FlinkUdfManifest.class);
List<String> filePath =
flinkUdfManifest.getJars().stream().map(Convert::toStr).collect(Collectors.toList());
List<String> pyFilePath =
@@ -100,9 +108,9 @@ public void downloadJavaUDF(@PathVariable Integer taskId, HttpServletResponse re
}

/**
* 提供docker通过http下载dinky-app.jar
* Allow Docker to download dinky-app.jar via an HTTP request
*
* @param version 版本
* @param version version of dinky-app.jar
* @param resp resp
*/
@GetMapping("downloadAppJar/{version}")
@@ -117,8 +125,33 @@ public void downloadAppJar(@PathVariable String version, HttpServletResponse res

@GetMapping("downloadFromRs")
@ApiOperation("Download From Resource")
public void downloadJavaUDF(String path, HttpServletResponse resp) {
@SaIgnore
public void downloadFromRs(String path, HttpServletResponse resp) {
InputStream inputStream = BaseResourceManager.getInstance().readFile(path);
ServletUtil.write(resp, inputStream);
}

// todo: There is a risk of injection in this interface
@PostMapping("uploadFromRsByLocal")
@ApiOperation("Upload From Resource By Local")
@SaIgnore
public Result<Void> uploadFromRs(String path, @RequestParam("file") MultipartFile file) {
SystemConfiguration systemConfiguration = SystemConfiguration.getInstances();
if (!systemConfiguration.getResourcesEnable().getValue()
|| !systemConfiguration.getResourcesModel().getValue().equals(ResourcesModelEnum.LOCAL)) {
return Result.failed("resources model is not local or resources is not enabled");
}

try {
File dest = new File(path);
if (!dest.getParentFile().exists()) {
dest.getParentFile().mkdirs();
}
file.transferTo(dest);
return Result.succeed();
} catch (IOException e) {
log.error("upload file failed", e);
throw new BusException("upload file failed");
}
}
}
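Both the download and upload methods above carry injection todos because path comes straight from the request. A common guard for that class of issue — shown here only as a hedged sketch, not code from this commit — is to resolve the requested path against a fixed base directory and reject anything that escapes it:

import java.io.File;
import java.io.IOException;

// Hypothetical sketch of the check the todos call for: canonicalize the
// requested path and require it to stay inside an allowed base directory.
public final class PathGuard {

    public static File resolveInside(String baseDir, String requestedPath) throws IOException {
        File base = new File(baseDir).getCanonicalFile();
        File target = new File(base, requestedPath).getCanonicalFile();
        // getCanonicalFile collapses ".." segments, so a traversal attempt
        // like "../../etc/passwd" resolves outside the base and is rejected.
        if (!target.getPath().startsWith(base.getPath() + File.separator)) {
            throw new IOException("path escapes the allowed resource directory: " + requestedPath);
        }
        return target;
    }
}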