From 121e06bf0ae82c0c92a1651cdda1140a1c47a74f Mon Sep 17 00:00:00 2001
From: Wink <809097465@qq.com>
Date: Mon, 9 Dec 2024 10:16:01 +0800
Subject: [PATCH 1/4] [Feature-4025][*] Release 1.2.0-rc5 (#4031)
---
docs/download/dinky-1.2.0.md | 20 ++++++++++----------
pom.xml | 2 +-
2 files changed, 11 insertions(+), 11 deletions(-)
diff --git a/docs/download/dinky-1.2.0.md b/docs/download/dinky-1.2.0.md
index 406ab0e33d..49878b6a59 100644
--- a/docs/download/dinky-1.2.0.md
+++ b/docs/download/dinky-1.2.0.md
@@ -3,17 +3,17 @@ sidebar_position: 78
title: 1.2.0 release
---
-| Dinky 版本 | Flink 版本 | 二进制程序 | Source |
-|----------|----------|-------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------|
-| 1.2.0 | 1.14 | [dinky-release-1.14-1.2.0-rc4.tar.gz](https://github.com/DataLinkDC/dinky/releases/download/v1.2.0/dinky-release-1.14-1.2.0-rc4.tar.gz) | [Source code (zip)](https://github.com/DataLinkDC/dinky/archive/refs/tags/v1.2.0.zip) |
-| 1.2.0 | 1.15 | [dinky-release-1.15-1.2.0-rc4.tar.gz](https://github.com/DataLinkDC/dinky/releases/download/v1.2.0/dinky-release-1.15-1.2.0-rc4.tar.gz) | [Source code (zip)](https://github.com/DataLinkDC/dinky/archive/refs/tags/v1.2.0.zip) |
-| 1.2.0 | 1.16 | [dinky-release-1.16-1.2.0-rc4.tar.gz](https://github.com/DataLinkDC/dinky/releases/download/v1.2.0/dinky-release-1.16-1.2.0-rc4.tar.gz) | [Source code (zip)](https://github.com/DataLinkDC/dinky/archive/refs/tags/v1.2.0.zip) |
-| 1.2.0 | 1.17 | [dinky-release-1.17-1.2.0-rc4.tar.gz](https://github.com/DataLinkDC/dinky/releases/download/v1.2.0/dinky-release-1.17-1.2.0-rc4.tar.gz) | [Source code (zip)](https://github.com/DataLinkDC/dinky/archive/refs/tags/v1.2.0.zip) |
-| 1.2.0 | 1.18 | [dinky-release-1.18-1.2.0-rc4.tar.gz](https://github.com/DataLinkDC/dinky/releases/download/v1.2.0/dinky-release-1.18-1.2.0-rc4.tar.gz) | [Source code (zip)](https://github.com/DataLinkDC/dinky/archive/refs/tags/v1.2.0.zip) |
-| 1.2.0 | 1.19 | [dinky-release-1.19-1.2.0-rc4.tar.gz](https://github.com/DataLinkDC/dinky/releases/download/v1.2.0/dinky-release-1.19-1.2.0-rc4.tar.gz) | [Source code (zip)](https://github.com/DataLinkDC/dinky/archive/refs/tags/v1.2.0.zip) |
-| 1.2.0 | 1.20 | [dinky-release-1.20-1.2.0-rc4.tar.gz](https://github.com/DataLinkDC/dinky/releases/download/v1.2.0/dinky-release-1.19-1.2.0-rc4.tar.gz) | [Source code (zip)](https://github.com/DataLinkDC/dinky/archive/refs/tags/v1.2.0.zip) |
+| Dinky 版本 | Flink 版本 | 二进制程序 | Source |
+|----------|----------|-----------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------|
+| 1.2.0 | 1.14 | [dinky-release-1.14-1.2.0-rc5.tar.gz](https://github.com/DataLinkDC/dinky/releases/download/v1.2.0/dinky-release-1.14-1.2.0-rc5.tar.gz) | [Source code (zip)](https://github.com/DataLinkDC/dinky/archive/refs/tags/v1.2.0.zip) |
+| 1.2.0 | 1.15 | [dinky-release-1.15-1.2.0-rc5.tar.gz](https://github.com/DataLinkDC/dinky/releases/download/v1.2.0/dinky-release-1.15-1.2.0-rc5.tar.gz) | [Source code (zip)](https://github.com/DataLinkDC/dinky/archive/refs/tags/v1.2.0.zip) |
+| 1.2.0 | 1.16 | [dinky-release-1.16-1.2.0-rc5.tar.gz](https://github.com/DataLinkDC/dinky/releases/download/v1.2.0/dinky-release-1.16-1.2.0-rc5.tar.gz) | [Source code (zip)](https://github.com/DataLinkDC/dinky/archive/refs/tags/v1.2.0.zip) |
+| 1.2.0 | 1.17 | [dinky-release-1.17-1.2.0-rc5.tar.gz](https://github.com/DataLinkDC/dinky/releases/download/v1.2.0/dinky-release-1.17-1.2.0-rc5.tar.gz) | [Source code (zip)](https://github.com/DataLinkDC/dinky/archive/refs/tags/v1.2.0.zip) |
+| 1.2.0 | 1.18 | [dinky-release-1.18-1.2.0-rc5.tar.gz](https://github.com/DataLinkDC/dinky/releases/download/v1.2.0/dinky-release-1.18-1.2.0-rc5.tar.gz) | [Source code (zip)](https://github.com/DataLinkDC/dinky/archive/refs/tags/v1.2.0.zip) |
+| 1.2.0 | 1.19 | [dinky-release-1.19-1.2.0-rc5.tar.gz](https://github.com/DataLinkDC/dinky/releases/download/v1.2.0/dinky-release-1.19-1.2.0-rc5.tar.gz) | [Source code (zip)](https://github.com/DataLinkDC/dinky/archive/refs/tags/v1.2.0.zip) |
+| 1.2.0 | 1.20 | [dinky-release-1.20-1.2.0-rc5.tar.gz](https://github.com/DataLinkDC/dinky/releases/download/v1.2.0/dinky-release-1.20-1.2.0-rc5.tar.gz) | [Source code (zip)](https://github.com/DataLinkDC/dinky/archive/refs/tags/v1.2.0.zip) |
-## Dinky-1.2.0 发行说明
+## Dinky-1.2.0-rc5 发行说明
### 升级说明
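[Note] The pre-patch table above had a copy-paste slip: the 1.20 row linked to the 1.19 tarball. A small link check can catch that class of mistake before a release page goes live. This is a sketch only (assumes curl is installed and the file path used above):

    # Sketch: verify every download link in the release table resolves (HTTP 200).
    grep -oE 'https://[^)]+\.(tar\.gz|zip)' docs/download/dinky-1.2.0.md | sort -u |
    while read -r url; do
      code=$(curl -sIL -o /dev/null -w '%{http_code}' "$url")
      [ "$code" = "200" ] || echo "BROKEN ($code): $url"
    done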
diff --git a/pom.xml b/pom.xml
index d7602d13f7..7cca543b4e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -110,7 +110,7 @@
UTF-8
2.5.0
0.10.2
- 1.2.0-rc4
+ 1.2.0-rc5
1.37.0
2.12.10
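[Note] After bumping the version property in pom.xml, the effective project version can be checked locally. A hedged sketch, assuming the standard Maven Help plugin is available; whether this particular property feeds project.version depends on how the POM wires it:

    # Sketch: print the resolved project version after applying the patch.
    mvn -q -DforceStdout help:evaluate -Dexpression=project.version
    # Expected to reflect the 1.2.0-rc5 bump if the property feeds project.version.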
From 97c267d30730100a9b35f523dde135fc5542682a Mon Sep 17 00:00:00 2001
From: Zzm0809 <934230207@qq.com>
Date: Mon, 9 Dec 2024 17:00:54 +0800
Subject: [PATCH 2/4] [Bug] Fix automation script path issue (#4030)
---
script/bin/auto.sh | 107 +++++++++++++--
script/bin/init_check_network.sh | 1 -
script/bin/init_cleanup.sh | 36 +++++
script/bin/init_db.sh | 1 -
script/bin/init_env.sh | 5 +-
script/bin/init_flink_dependences.sh | 31 +++--
script/bin/init_hadoop_dependences.sh | 1 -
script/bin/init_jdbc_driver.sh | 3 +-
script/bin/init_tools_main.sh | 182 +++++++++++++++++++-------
script/bin/parse_yml.sh | 63 +++++++++
10 files changed, 351 insertions(+), 79 deletions(-)
create mode 100644 script/bin/init_cleanup.sh
create mode 100644 script/bin/parse_yml.sh
diff --git a/script/bin/auto.sh b/script/bin/auto.sh
index 37b10141f3..0397a85c3d 100644
--- a/script/bin/auto.sh
+++ b/script/bin/auto.sh
@@ -2,6 +2,7 @@
# debug mode
#set -x
+
export RED='\033[31m'
export GREEN='\033[32m'
export YELLOW='\033[33m'
@@ -11,10 +12,20 @@ export CYAN='\033[36m'
export RESET='\033[0m'
+ENV_FILE="/etc/profile.d/dinky_env"
+if [ -f "${ENV_FILE}" ]; then
+ source "${ENV_FILE}"
+fi
-if [ -z "${DINKY_HOME}" ]; then
- echo -e "${RED}DINKY_HOME environment variable is not set. Attempting to determine the correct path...${RESET}"
+DB_ENV_FILE="/etc/profile.d/dinky_db"
+if [ -f "${DB_ENV_FILE}" ]; then
+ source "${DB_ENV_FILE}"
+fi
+
+source /etc/profile
+RETURN_HOME_PATH=""
+function get_home_path() {
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
@@ -22,16 +33,28 @@ if [ -z "${DINKY_HOME}" ]; then
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE"
done
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
- export DINKY_HOME="$(dirname "$DIR")"
+ RETURN_HOME_PATH=$(dirname "$DIR")
+}
+
+
+if [ -z "${DINKY_HOME}" ]; then
+ echo -e "${RED}DINKY_HOME environment variable is not set. Attempting to determine the correct path...${RESET}"
+ get_home_path
+ export DINKY_HOME=${RETURN_HOME_PATH}
echo -e "${GREEN}DINKY_HOME is set to: ${DINKY_HOME}${RESET}"
else
- echo -e "${GREEN}DINKY_HOME is already set to: ${DINKY_HOME}${RESET}"
+ get_home_path
+ if [ "${DINKY_HOME}" != "${RETURN_HOME_PATH}" ]; then
+ export DINKY_HOME=${RETURN_HOME_PATH}
+ echo -e "${YELLOW}DINKY_HOME is not equal to the current path, reset DINKY_HOME to: ${RETURN_HOME_PATH}${RESET}"
+ else
+ echo -e "${GREEN}DINKY_HOME is already set to: ${DINKY_HOME}${RESET}"
+ fi
fi
FLINK_VERSION=${2}
-DINKY_HOME=${DINKY_HOME:-$(cd "$(dirname "$0")"; cd ..; pwd)}
JAVA_VERSION=$(java -version 2>&1 | sed '1!d' | sed -e 's/"//g' | awk '{print $3}' | awk -F'.' '{print $1"."$2}')
APP_HOME="${DINKY_HOME}"
@@ -71,6 +94,70 @@ assertIsInputVersion() {
fi
}
+
+if [ -f "${APP_HOME}/config/application.yml" ]; then
+ result=$("${APP_HOME}"/bin/parse_yml.sh "${APP_HOME}/config/application.yml" "server.port")
+ APP_PORT=$result
+fi
+
+echo -e "${GREEN}From ${APP_HOME}/config/application.yml server.port: ${APP_PORT}${RESET}"
+
+if [ -z "$APP_PORT" ]; then
+ echo -e "${RED}Could not find server.port in configuration files, using default port 8888 ${RESET}"
+ APP_PORT=8888
+fi
+
+# Function: Check the status of the health check endpoint
+check_health() {
+ curl --silent --max-time 2 --output /dev/null --write-out "%{http_code}" "http://localhost:$APP_PORT/actuator/health"
+}
+
+
+
+format_time() {
+ local seconds=$1
+ local hours=$((seconds / 3600))
+ local minutes=$(( (seconds % 3600) / 60 ))
+ local remaining_seconds=$((seconds % 60))
+ printf "%02d:%02d:%02d" $hours $minutes $remaining_seconds
+}
+
+function wait_start_process() {
+ echo -e "${GREEN}>>>>>>>>>>>>>>>>>>>>> Starting application... <<<<<<<<<<<<<<<<<<<<<<<${RESET}"
+ local max_attempts=100
+ local attempt=0
+ local delay=0.25
+ local health_status=""
+ local success_status_codes=("200")
+ local start_time=$(date +%s)
+
+ while [ $attempt -lt $max_attempts ]; do
+ attempt=$((attempt + 1))
+ local current_time=$(date +%s)
+ local elapsed_time=$((current_time - start_time))
+ local formatted_time=$(format_time $elapsed_time)
+ health_status=$(check_health)
+ for code in "${success_status_codes[@]}"; do
+ if [ "$health_status" == "$code" ]; then
+ echo -ne "\r[==================================================] 100%\n"
+ echo -e "${GREEN}Application started completed.${RESET}"
+ return 0
+ fi
+ done
+ local progress=$((attempt * 100 / max_attempts))
+ local bar_length=50
+ local filled_length=$((progress * bar_length / 100))
+ local empty_length=$((bar_length - filled_length))
+ local bar=$(printf '>%.0s' $(seq 1 $filled_length))$(printf ' %.0s' $(seq 1 $empty_length))
+ echo -ne "\r[${bar}] ${progress}% (time consuming: ${formatted_time})"
+ sleep $delay
+ done
+ echo -ne "\r[==================================================] 100% (time consuming: ${formatted_time})\n"
+ echo -e "${RED}Application start failed. Please check the log for details.${RESET}"
+ return 1
+}
+
+
# Use FLINK_HOME:
CLASS_PATH="${APP_HOME}:${APP_HOME}/lib/*:${APP_HOME}/config:${EXTENDS_HOME}/*:${CUSTOMER_JAR_PATH}/*:${EXTENDS_HOME}/flink${FLINK_VERSION}/dinky/*:${EXTENDS_HOME}/flink${FLINK_VERSION}/flink/*:${EXTENDS_HOME}/flink${FLINK_VERSION}/*"
PID_FILE="dinky.pid"
@@ -129,7 +216,9 @@ start() {
updatePid
if [ -z "$pid" ]; then
nohup java ${PARAMS_OPT} ${JVM_OPTS} ${OOM_OPT} ${GC_OPT} -Xverify:none -cp "${CLASS_PATH}" org.dinky.Dinky ${JAR_PARAMS_OPT} > ${DINKY_LOG_PATH}/dinky-start.log 2>&1 &
- echo $! >"${PID_PATH}"/${PID_FILE}
+ PID=$!
+ echo "${PID}" >"${PID_PATH}"/${PID_FILE}
+ wait_start_process
echo -e "${GREEN}........................................Start Dinky Successfully........................................${RESET}"
echo -e "${GREEN}current log path : ${DINKY_LOG_PATH}/dinky-start.log , you can execute tail -fn1000 ${DINKY_LOG_PATH}/dinky-start.log to watch the log${RESET}"
else
@@ -153,9 +242,11 @@ startWithJmx() {
updatePid
if [ -z "$pid" ]; then
nohup java ${PARAMS_OPT} ${JVM_OPTS} ${OOM_OPT} ${GC_OPT} -Xverify:none "${JMX}" -cp "${CLASS_PATH}" org.dinky.Dinky ${JAR_PARAMS_OPT} > ${DINKY_LOG_PATH}/dinky-start.log 2>&1 &
-# echo $! >"${PID_PATH}"/${PID_FILE}
- updatePid
+ PID=$!
+ wait_start_process
echo -e "$GREEN........................................Start Dinky with Jmx Successfully........................................$RESET"
+ updatePid
+
else
echo -e "$YELLOW Dinky pid $pid is in ${PID_PATH}/${PID_FILE}, Please stop first !!!$RESET"
fi
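[Note] The startup wait added above boils down to polling the Spring Boot actuator health endpoint until it returns 200. A standalone sketch of that pattern (port and retry values are illustrative, not part of the patch):

    # Minimal sketch of the readiness poll used by wait_start_process above.
    APP_PORT=8888
    for i in $(seq 1 100); do
      code=$(curl --silent --max-time 2 -o /dev/null -w '%{http_code}' \
             "http://localhost:${APP_PORT}/actuator/health")
      if [ "$code" = "200" ]; then
        echo "service is up after ${i} attempts"
        break
      fi
      sleep 0.25
    done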
diff --git a/script/bin/init_check_network.sh b/script/bin/init_check_network.sh
index 66b5cd105b..92335059a9 100644
--- a/script/bin/init_check_network.sh
+++ b/script/bin/init_check_network.sh
@@ -1,5 +1,4 @@
#!/bin/bash
-set -x
EXTERNAL_CONNECTIVITY_CHECK_URL="www.baidu.com"
diff --git a/script/bin/init_cleanup.sh b/script/bin/init_cleanup.sh
new file mode 100644
index 0000000000..6bcdf048de
--- /dev/null
+++ b/script/bin/init_cleanup.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+
+export RED='\033[31m'
+export GREEN='\033[32m'
+export YELLOW='\033[33m'
+export BLUE='\033[34m'
+export MAGENTA='\033[35m'
+export CYAN='\033[36m'
+export RESET='\033[0m'
+
+if [ -f /etc/profile.d/dinky_env ]; then
+ source /etc/profile.d/dinky_env
+fi
+
+if [ -z "$DINKY_HOME" ]; then
+ echo -e "${RED}DINKY_HOME is not set, please check the environment variable...${RESET}"
+ exit 1
+else
+ EXTENDS_HOME="$DINKY_HOME"/extends
+ FLINK_VERSION_SCAN=$(ls -n "${EXTENDS_HOME}" | grep '^d' | grep flink | awk -F 'flink' '{print $2}')
+
+ echo -e "${GREEN}>>>>>>>>>>>>>>>>>>>. Start cleaning up the environment... <<<<<<<<<<<<<<<<<<<<${RESET}"
+ echo -e "${GREEN}Cleaning up the environment variables...${RESET}"
+ rm -rf /etc/profile.d/dinky_*
+ echo -e "${GREEN}Cleaning up the flink jar dependencies...${RESET}"
+ rm -rf "${EXTENDS_HOME}"/flink"${FLINK_VERSION_SCAN}"/flink-*
+ echo -e "${GREEN}Cleaning up the flink shaded hadoop jar dependencies...${RESET}"
+ rm -rf "${EXTENDS_HOME}"/flink-shaded-hadoop-*
+ echo -e "${GREEN}Cleaning up the mysql jar dependencies...${RESET}"
+ rm -rf "$DINKY_HOME"/lib/mysql-connector-*
+ echo -e "${GREEN}Cleaning up the environment variables of DINKY_HOME ...${RESET}"
+ unset DINKY_HOME
+ echo -e "${GREEN}Refresh environment variables...${RESET}"
+ source /etc/profile
+ echo -e "${GREEN}Environment cleanup completed...${RESET}"
+fi
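[Note] Since init_cleanup.sh removes files with rm -rf, it can help to preview what would be deleted first. A sketch under the same DINKY_HOME layout the script assumes (globs approximate the script's patterns):

    # Sketch: dry-run preview of what init_cleanup.sh would delete.
    [ -f /etc/profile.d/dinky_env ] && source /etc/profile.d/dinky_env
    ls -ld /etc/profile.d/dinky_* \
          "${DINKY_HOME}"/extends/flink*/flink-* \
          "${DINKY_HOME}"/extends/flink-shaded-hadoop-* \
          "${DINKY_HOME}"/lib/mysql-connector-* 2>/dev/null
    # If the listing looks right, run the real cleanup (needs root for /etc/profile.d):
    # sudo bash "${DINKY_HOME}"/bin/init_cleanup.sh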
diff --git a/script/bin/init_db.sh b/script/bin/init_db.sh
index 22b7372ab2..ae8d2e4f43 100644
--- a/script/bin/init_db.sh
+++ b/script/bin/init_db.sh
@@ -1,5 +1,4 @@
#!/bin/bash
-set -x
DINKY_HOME_PARAMS=$1
DB_ENV_FILE=$2
diff --git a/script/bin/init_env.sh b/script/bin/init_env.sh
index 94242d380c..57587a14c0 100644
--- a/script/bin/init_env.sh
+++ b/script/bin/init_env.sh
@@ -1,5 +1,4 @@
#!/bin/bash
-set -x
DINKY_HOME_PARAMS=$1
ENV_FILE=$2
@@ -18,7 +17,7 @@ while true; do
echo -e "${GREEN}Use the automatically obtained DINKY_HOME environment variable${RESET}"
echo -e "${GREEN} The currently obtained path is: $DINKY_HOME_PARAMS to perform automatic configuration${RESET}"
add_to_env "DINKY_HOME" "$DINKY_HOME_PARAMS" "$ENV_FILE"
- add_to_env "PATH" "\$DINKY_HOME/bin" "$ENV_FILE"
+ add_to_env "PATH" "\$DINKY_HOME/bin:\$PATH" "$ENV_FILE"
sleep 2
source $ENV_FILE
echo -e "${GREEN}DINKY_HOME environment variable configuration completed. Please confirm whether the following configuration is correct:${RESET}"
@@ -34,7 +33,7 @@ while true; do
else
echo -e "${GREEN}The path you entered is: $dinky_home_path${RESET}"
add_to_env "DINKY_HOME" "$dinky_home_path" "$ENV_FILE"
- add_to_env "PATH" "\$DINKY_HOME/bin" "$ENV_FILE"
+ add_to_env "PATH" "\$DINKY_HOME/bin:\$PATH" "$ENV_FILE"
sleep 2
source $ENV_FILE
echo -e "${GREEN}DINKY_HOME environment variable configuration completed. Please confirm whether the following configuration is correct:${RESET}"
diff --git a/script/bin/init_flink_dependences.sh b/script/bin/init_flink_dependences.sh
index 451d3b214d..b17be3aff0 100644
--- a/script/bin/init_flink_dependences.sh
+++ b/script/bin/init_flink_dependences.sh
@@ -1,5 +1,4 @@
#!/bin/bash
-set -x
CURRENT_FLINK_FULL_VERSION=$1
FLINK_VERSION_SCAN=$2
@@ -16,12 +15,16 @@ if [ -z "$CURRENT_FLINK_FULL_VERSION" ] || [ -z "$FLINK_VERSION_SCAN" ] || [ -z
exit 1
fi
-if [ -f "$DINKY_TMP_DIR/flink-${CURRENT_FLINK_FULL_VERSION}-bin-scala_2.12.tgz" ]; then
- echo -e "${YELLOW}$DINKY_TMP_DIR ALREADY EXISTS flink-${CURRENT_FLINK_FULL_VERSION}-bin-scala_2.12.tgz file,To ensure completeness, delete first ${DINKY_TMP_DIR}/flink-${CURRENT_FLINK_FULL_VERSION}-bin-scala_2.12.tgz Download the file again${RESET}"
- rm -rf ${DINKY_TMP_DIR}/flink-${CURRENT_FLINK_FULL_VERSION}-bin-scala_2.12.tgz
- if [ -d "$DINKY_TMP_DIR/flink-${CURRENT_FLINK_FULL_VERSION}" ]; then
- echo -e "${YELLOW}The flink directory already exists, delete it $DINKY_TMP_DIR/flink-${CURRENT_FLINK_FULL_VERSION}"
- rm -rf $DINKY_TMP_DIR/flink-${CURRENT_FLINK_FULL_VERSION}
+FLINK_STORE_DIR=${DINKY_TMP_DIR}/flink-download
+
+mkdir -p ${FLINK_STORE_DIR}
+
+if [ -f "${FLINK_STORE_DIR}/flink-${CURRENT_FLINK_FULL_VERSION}-bin-scala_2.12.tgz" ]; then
+ echo -e "${YELLOW}${FLINK_STORE_DIR} ALREADY EXISTS flink-${CURRENT_FLINK_FULL_VERSION}-bin-scala_2.12.tgz file,To ensure completeness, delete first ${FLINK_STORE_DIR}/flink-${CURRENT_FLINK_FULL_VERSION}-bin-scala_2.12.tgz Download the file again${RESET}"
+ rm -rf ${FLINK_STORE_DIR}/flink-${CURRENT_FLINK_FULL_VERSION}-bin-scala_2.12.tgz
+ if [ -d "${FLINK_STORE_DIR}/flink-${CURRENT_FLINK_FULL_VERSION}" ]; then
+ echo -e "${YELLOW}The flink directory already exists, delete it ${FLINK_STORE_DIR}/flink-${CURRENT_FLINK_FULL_VERSION}"
+ rm -rf ${FLINK_STORE_DIR}/flink-${CURRENT_FLINK_FULL_VERSION}
fi
fi
@@ -29,13 +32,13 @@ try_tsinghua_mirror() {
local tsinghua_url="https://mirrors.tuna.tsinghua.edu.cn/apache/flink/flink-${CURRENT_FLINK_FULL_VERSION}/flink-${CURRENT_FLINK_FULL_VERSION}-bin-scala_2.12.tgz"
local apache_url="https://archive.apache.org/dist/flink/flink-${CURRENT_FLINK_FULL_VERSION}/flink-${CURRENT_FLINK_FULL_VERSION}-bin-scala_2.12.tgz"
- echo -e "${GREEN}Start downloading the Flink-${FLINK_VERSION_SCAN} installation package... Store it in the ${DINKY_TMP_DIR} directory${RESET}"
- if download_file "$tsinghua_url" "$DINKY_TMP_DIR"; then
+ echo -e "${GREEN}Start downloading the Flink-${FLINK_VERSION_SCAN} installation package... Store it in the ${FLINK_STORE_DIR} directory${RESET}"
+ if download_file "$tsinghua_url" "${FLINK_STORE_DIR}"; then
echo -e "${BLUE}The address of the currently downloaded Flink installation package is:${tsinghua_url}${RESET}"
return 0
else
echo -e "${YELLOW}File not found in Tsinghua University mirror, try downloading from Apache official source...${RESET}"
- if download_file "$apache_url" "$DINKY_TMP_DIR"; then
+ if download_file "$apache_url" "${FLINK_STORE_DIR}"; then
echo -e "${BLUE}The address of the currently downloaded Flink installation package is:${apache_url}${RESET}"
return 0
else
@@ -53,7 +56,7 @@ fi
echo -e "${GREEN}Flink installation package download completed。${RESET}"
echo -e "\n${GREEN}===============================================================${RESET}\n"
echo -e "${GREEN}Start decompressing the Flink installation package...${RESET}"
-tar -zxvf ${DINKY_TMP_DIR}/flink-${CURRENT_FLINK_FULL_VERSION}-bin-scala_2.12.tgz -C ${DINKY_TMP_DIR}/
+tar -zxvf ${FLINK_STORE_DIR}/flink-${CURRENT_FLINK_FULL_VERSION}-bin-scala_2.12.tgz -C ${FLINK_STORE_DIR}/
if [ $? -eq 0 ]; then
echo -e "${GREEN}Flink installation package decompression completed。${RESET}"
else
@@ -63,8 +66,8 @@ fi
echo -e "\n${GREEN}===============================================================${RESET}\n"
-flink_dir_tmp=$(ls -n ${DINKY_TMP_DIR} | grep '^d' | grep flink | awk '{print $9}')
-full_flink_dir_tmp="${DINKY_TMP_DIR}/${flink_dir_tmp}"
+flink_dir_tmp=$(ls -n ${FLINK_STORE_DIR} | grep '^d' | grep flink | awk '{print $9}')
+full_flink_dir_tmp="${FLINK_STORE_DIR}/${flink_dir_tmp}"
echo -e "${BLUE}Unzipped directory name:${full_flink_dir_tmp}${RESET}"
@@ -102,4 +105,6 @@ ls -l ${EXTENDS_HOME}/flink${FLINK_VERSION_SCAN}/
echo -e "${YELLOW}Please check the above dependent files。${RESET}"
+rm -rf ${FLINK_STORE_DIR}/flink-${CURRENT_FLINK_FULL_VERSION}*
+
echo -e "${GREEN}The basic dependency processing is completed, please perform subsequent operations according to the actual situation.${RESET}"
\ No newline at end of file
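[Note] The script relies on a download_file helper defined outside this patch and tries the Tsinghua mirror before falling back to the Apache archive (the mirror typically carries only current releases, hence the fallback). A self-contained sketch of that fallback pattern with plain wget, for illustration only (version and directory are placeholders):

    # Sketch: mirror-first download with fallback to the Apache archive.
    FLINK_FULL_VERSION=1.20.0
    DEST_DIR=/tmp/dinky-tmp/flink-download
    mkdir -p "${DEST_DIR}"
    TGZ="flink-${FLINK_FULL_VERSION}-bin-scala_2.12.tgz"
    for base in "https://mirrors.tuna.tsinghua.edu.cn/apache/flink" \
                "https://archive.apache.org/dist/flink"; do
      if wget -q -P "${DEST_DIR}" "${base}/flink-${FLINK_FULL_VERSION}/${TGZ}"; then
        echo "downloaded from ${base}"
        break
      fi
    done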
diff --git a/script/bin/init_hadoop_dependences.sh b/script/bin/init_hadoop_dependences.sh
index c080a44188..0d9dc20916 100644
--- a/script/bin/init_hadoop_dependences.sh
+++ b/script/bin/init_hadoop_dependences.sh
@@ -1,5 +1,4 @@
#!/bin/bash
-set -x
EXTENDS_HOME=$1
diff --git a/script/bin/init_jdbc_driver.sh b/script/bin/init_jdbc_driver.sh
index 63631af604..ca9fd9c952 100644
--- a/script/bin/init_jdbc_driver.sh
+++ b/script/bin/init_jdbc_driver.sh
@@ -1,5 +1,4 @@
#!/bin/bash
-set -x
DINKY_LIB_DIR=$1
@@ -7,7 +6,7 @@ echo -e "${GREEN}Start downloading the mysql driver package...${RESET}"
if [ -f "${DINKY_LIB_DIR}/mysql-connector-j-8.4.0.jar" ]; then
echo -e "${YELLOW}mysql The driver package already exists, no need to download it again. Skip this step。${RESET}"
else
- download_file https://repo1.maven.org/maven2/com/mysql/mysql-connector-j/8.4.0/mysql-connector-j-8.4.0.jar "${DINKY_LIB}"
+ download_file https://repo1.maven.org/maven2/com/mysql/mysql-connector-j/8.4.0/mysql-connector-j-8.4.0.jar "${DINKY_LIB_DIR}"
echo -e "${GREEN}Download is complete, please verify. The downloaded file storage address is: ${DINKY_LIB_DIR}/mysql-connector-j-8.4.0.jar${RESET}"
if [ -f "${DINKY_LIB_DIR}/mysql-connector-j-8.4.0.jar" ]; then
echo -e "${GREEN}mysql driver package downloaded successfully。${RESET}"
diff --git a/script/bin/init_tools_main.sh b/script/bin/init_tools_main.sh
index 1b2aae6821..d8757652f6 100644
--- a/script/bin/init_tools_main.sh
+++ b/script/bin/init_tools_main.sh
@@ -1,34 +1,39 @@
#!/bin/bash
-set -x
+
+export RED='\033[31m'
+export GREEN='\033[32m'
+export YELLOW='\033[33m'
+export BLUE='\033[34m'
+export MAGENTA='\033[35m'
+export CYAN='\033[36m'
+export RESET='\033[0m'
+
+# debug mode: true or false If empty, defaults to false
+USE_DEBUG=$1
+
+if [ -z "${USE_DEBUG}" ]; then
+ USE_DEBUG="false"
+ echo -e "${YELLOW}Debug mode is not enabled, if you need to enable debug mode, please add the first parameter 'true' when executing the script. example: init_tools_main.sh true $RESET"
+elif [ "${USE_DEBUG}" = "true" ]; then
+ set -x
+fi
ENV_FILE="/etc/profile.d/dinky_env"
if [ -f "${ENV_FILE}" ]; then
source "${ENV_FILE}"
else
- echo "" > "${ENV_FILE}"
- source "${ENV_FILE}"
+ echo "export PATH=/bin:/usr/bin:\$PATH" > "${ENV_FILE}" && source "${ENV_FILE}"
fi
DB_ENV_FILE="/etc/profile.d/dinky_db"
if [ -f "${DB_ENV_FILE}" ]; then
source "${DB_ENV_FILE}"
else
- echo "" > "${DB_ENV_FILE}"
- source "${DB_ENV_FILE}"
+ echo "export PATH=/bin:/usr/bin:\$PATH" > "${DB_ENV_FILE}" && source "${DB_ENV_FILE}"
fi
chmod 755 $ENV_FILE
chmod 755 $DB_ENV_FILE
-source /etc/profile
-
-
-export RED='\033[31m'
-export GREEN='\033[32m'
-export YELLOW='\033[33m'
-export BLUE='\033[34m'
-export MAGENTA='\033[35m'
-export CYAN='\033[36m'
-export RESET='\033[0m'
echo -e "${GREEN}=====================================================================${RESET}"
echo -e "${GREEN}=====================================================================${RESET}"
@@ -36,11 +41,38 @@ echo -e "${GREEN}============ Welcome to the Dinky initialization script =======
echo -e "${GREEN}======================================================================${RESET}"
echo -e "${GREEN}======================================================================${RESET}"
-APP_HOME=${DINKY_HOME:-$(cd "$(dirname "$0")"; cd ..; pwd)}
-sudo chmod +x "${APP_HOME}"/bin/init_*.sh
+RETURN_HOME_PATH=""
+function get_home_path() {
+ SOURCE="${BASH_SOURCE[0]}"
+ while [ -h "$SOURCE" ]; do
+ DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+ SOURCE="$(readlink "$SOURCE")"
+ [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE"
+ done
+ DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+ RETURN_HOME_PATH=$(dirname "$DIR")
+}
+
+
+
+if [ -z "${DINKY_HOME}" ]; then
+ echo -e "${RED}DINKY_HOME environment variable is not set. Attempting to determine the correct path...${RESET}"
+ get_home_path
+ APP_HOME="${RETURN_HOME_PATH}"
+else
+ get_home_path
+ if [ "${DINKY_HOME}" != "${RETURN_HOME_PATH}" ]; then
+ echo -e "${YELLOW}DINKY_HOME is not equal to the current path, use new path to init: ${RETURN_HOME_PATH}${RESET}"
+ APP_HOME="${RETURN_HOME_PATH}"
+ else
+ echo -e "${GREEN}DINKY_HOME is already set to: ${DINKY_HOME}${RESET}"
+ fi
+fi
+echo -e "${GREEN}Dinky root path: ${APP_HOME} ${RESET}"
+sudo chmod +x "${APP_HOME}"/bin/init_*.sh
EXTENDS_HOME="${APP_HOME}/extends"
if [ ! -d "${EXTENDS_HOME}" ]; then
@@ -68,7 +100,7 @@ if [ ! -d "${DINKY_LIB}" ]; then
exit 1
fi
-# 函数:检查命令是否存在,不存在则尝试安装
+# Function: Check whether the command exists, if not, try to install it
check_command() {
local cmd="$1"
echo -e "${BLUE}Check if command: $cmd exists...${RESET}"
@@ -87,7 +119,8 @@ check_command() {
echo -e "${GREEN}========== Command $cmd check completed. OK, continue executing the script. ==========${RESET}"
}
-sh "${APP_HOME}/bin/init_check_network.sh"
+# Source the script so that debug mode (set -x) propagates to it
+source "${APP_HOME}/bin/init_check_network.sh"
check_command "wget"
@@ -124,29 +157,41 @@ export -f add_to_env
echo
echo
+function init_env() {
+ while true; do
+ read -p "Do you need to configure the DINKY_HOME environment variable? (yes/no):" is_init_dinky_home
+ is_init_dinky_home=$(echo "$is_init_dinky_home" | tr '[:upper:]' '[:lower:]' | tr -d '[:space:]')
+ case $is_init_dinky_home in
+ yes | y)
+ # Source the script so that debug mode (set -x) propagates to it
+ source "${APP_HOME}"/bin/init_env.sh ${APP_HOME} ${ENV_FILE}
+ echo -e "${GREEN}DINKY_HOME environment variable configuration completed. the configuration file is:${ENV_FILE} ${RESET}"
+ break
+ ;;
+ no | n)
+ echo -e "${GREEN}Skip DINKY_HOME environment variable configuration.${RESET}"
+ break
+ ;;
+ *)
+ echo -e "${RED}The entered value is incorrect, please rerun the script to select the correct value.${RESET}"
+ ;;
+ esac
+ done
+}
+
+
+
echo -e "${GREEN} ====================== Environment variable initialization script -> Start ====================== ${RESET}"
DINKY_HOME_TMP=$(echo $DINKY_HOME)
if [ -z "$DINKY_HOME_TMP" ]; then
- while true; do
- read -p "Do you need to configure the DINKY_HOME environment variable? (yes/no):" is_init_dinky_home
- is_init_dinky_home=$(echo "$is_init_dinky_home" | tr '[:upper:]' '[:lower:]' | tr -d '[:space:]')
- case $is_init_dinky_home in
- yes | y)
- sh "${APP_HOME}"/bin/init_env.sh ${APP_HOME} ${ENV_FILE}
- echo -e "${GREEN}DINKY_HOME environment variable configuration completed. the configuration file is:${ENV_FILE} ${RESET}"
- break
- ;;
- no | n)
- echo -e "${GREEN}Skip DINKY_HOME environment variable configuration.${RESET}"
- break
- ;;
- *)
- echo -e "${RED}The entered value is incorrect, please rerun the script to select the correct value.${RESET}"
- ;;
- esac
- done
+ init_env
else
- echo -e "${GREEN}DINKY_HOME environment variable has been configured at ${DINKY_HOME_TMP},Skip configuration.${RESET}"
+ if [ "$APP_HOME" != "$DINKY_HOME_TMP" ]; then
+ echo -e "${RED}DINKY_HOME is not equal to the current path, The previous one was: ${DINKY_HOME_TMP}. The current one is: ${APP_HOME}, which needs to be reconfigured.${RESET}"
+ init_env
+ else
+ echo -e "${GREEN}DINKY_HOME environment variable has been configured at ${DINKY_HOME_TMP},Skip configuration.${RESET}"
+ fi
fi
@@ -162,17 +207,18 @@ while true; do
echo -e "${BLUE} ======== (h2 comes with it by default and does not need to perform this step)=========== ${RESET}"
echo -e "${BLUE} ============================== Please select 1, 2, 3 ====================================== ${RESET}"
echo -e "${BLUE} ==================================== 1. mysql ============================================= ${RESET}"
- echo -e "${BLUE} ==================================== 2. pgsql ========================================= ${RESET}"
+ echo -e "${BLUE} ==================================== 2. postgresql ========================================= ${RESET}"
echo -e "${BLUE} ================================ 3. Skip this step ========================================== ${RESET}"
echo -e "${BLUE} ================================ Enter number selection ================================== ${RESET}"
read -p "Please enter your database type:" db_type
case $db_type in
1)
- sh "${APP_HOME}"/bin/init_jdbc_driver.sh "${DINKY_LIB}"
+ # Source the script so that debug mode (set -x) propagates to it
+ source "${APP_HOME}"/bin/init_jdbc_driver.sh "${DINKY_LIB}"
break
;;
2)
- echo -e "${GREEN}It seems that pgsql has been integrated by default, so there is no need to perform this step. Please perform subsequent installation and configuration operations as needed.${RESET}"
+ echo -e "${GREEN}It seems that postgresql has been integrated by default, so there is no need to perform this step. Please perform subsequent installation and configuration operations as needed.${RESET}"
break
;;
3)
@@ -208,19 +254,18 @@ else
echo -e "${GREEN}The current Flink version number deployed by Dinky:${FLINK_VERSION_SCAN}${RESET}"
fi
-# 根据 Dinky 部署的Flink对应的版本号,获取对应的 Flink 版本
CURRENT_FLINK_FULL_VERSION=${version_map[$FLINK_VERSION_SCAN]}
echo -e "${GREEN}Obtain the version number corresponding to the deployed Flink (full version number) based on the scanned current Flink version number: flink-${CURRENT_FLINK_FULL_VERSION}${RESET}"
-# 步骤2:获取Dinky部署的Flink对应的版本号,然后下载Flink安装包
while true; do
read -p "It is detected that the Flink version number deployed by Dinky is: ${FLINK_VERSION_SCAN}, and the Flink installation package version number that needs to be downloaded is: flink-${CURRENT_FLINK_FULL_VERSION}-bin-scala_2.12.tgz. Please choose whether to initialize Flink related dependencies?(yes/no/exit)" is_init_flink
is_init_flink=$(echo "$is_init_flink" | tr '[:upper:]' '[:lower:]' | tr -d '[:space:]')
case $is_init_flink in
yes | y )
- sh "${APP_HOME}"/bin/init_flink_dependences.sh "${CURRENT_FLINK_FULL_VERSION}" "${FLINK_VERSION_SCAN}" "${DINKY_TMP_DIR}" "${EXTENDS_HOME}" "${APP_HOME}"
+ # Source the script so that debug mode (set -x) propagates to it
+ source "${APP_HOME}"/bin/init_flink_dependences.sh "${CURRENT_FLINK_FULL_VERSION}" "${FLINK_VERSION_SCAN}" "${DINKY_TMP_DIR}" "${EXTENDS_HOME}" "${APP_HOME}"
break
;;
no | n )
@@ -248,7 +293,8 @@ while true; do
is_hadoop=$(echo "$is_hadoop" | tr '[:upper:]' '[:lower:]' | tr -d '[:space:]')
case $is_hadoop in
yes | y )
- sh "${APP_HOME}/bin/init_hadoop_dependences.sh" "${EXTENDS_HOME}"
+ # Source the script so that debug mode (set -x) propagates to it
+ source "${APP_HOME}/bin/init_hadoop_dependences.sh" "${EXTENDS_HOME}"
break
;;
no | n )
@@ -277,7 +323,8 @@ while true; do
is_init_db=$(echo "$is_init_db" | tr '[:upper:]' '[:lower:]' | tr -d '[:space:]')
case $is_init_db in
yes | y )
- sh "${APP_HOME}/bin/init_db.sh" "${DINKY_HOME}" "${DB_ENV_FILE}"
+ # Source the script so that debug mode (set -x) propagates to it
+ source "${APP_HOME}/bin/init_db.sh" "${DINKY_HOME}" "${DB_ENV_FILE}"
echo -e "${GREEN}The database configuration file initialization script has been executed successfully the configuration file is:${DB_ENV_FILE} ${RESET}"
break
;;
@@ -295,9 +342,44 @@ while true; do
esac
done
echo -e "${GREEN} ====================== Database configuration file initialization script -> End ====================== ${RESET}"
+
+function echo_warning_msg() {
+ echo -e "${RED}Note: To make these changes permanent, you may need to restart your terminal or run 'source $DB_ENV_FILE && source $ENV_FILE' ${RESET}"
+ echo -e "${RED}Note: To make these changes permanent, you may need to restart your terminal or run 'source $DB_ENV_FILE && source $ENV_FILE' ${RESET}"
+ echo -e "${RED}Note: To make these changes permanent, you may need to restart your terminal or run 'source $DB_ENV_FILE && source $ENV_FILE' ${RESET}"
+}
+
+echo -e "${GREEN} ====================== Dinky service startup script -> Start ====================== ${RESET}"
+
+while true; do
+ read -p "Do you need to start the Dinky service?(yes/no/exit)" is_start
+ is_start=$(echo "$is_start" | tr '[:upper:]' '[:lower:]' | tr -d '[:space:]')
+ case $is_start in
+ yes | y )
+ # Source the script so that debug mode (set -x) propagates to it
+ source "${APP_HOME}"/bin/auto.sh restart
+ break
+ ;;
+ no | n )
+ echo_warning_msg
+ echo
+ echo -e "${GREEN}The Dinky service startup script has been skipped, Please execute the above command first, and then start the service manually -> ${APP_HOME}/bin/auto.sh restart | start。${RESET}"
+ break
+ ;;
+ exit | e )
+ echo_warning_msg
+ echo -e "${GREEN}If you choose exit, the program will exit。${RESET}"
+ exit 0
+ ;;
+ *)
+ echo -e "${RED}Invalid input, please re-enter yes/no/exit。${RESET}"
+ ;;
+ esac
+done
+echo -e "${GREEN} ====================== Dinky service startup script -> End ====================== ${RESET}"
echo
echo
-echo -e "${RED}Note: To make these changes permanent, you may need to restart your terminal or run 'source $DB_ENV_FILE && source $ENV_FILE' ${RESET}"
-echo -e "${RED}Note: To make these changes permanent, you may need to restart your terminal or run 'source $DB_ENV_FILE && source $ENV_FILE' ${RESET}"
-echo -e "${RED}Note: To make these changes permanent, you may need to restart your terminal or run 'source $DB_ENV_FILE && source $ENV_FILE' ${RESET}"
-echo
+echo -e "${GREEN} ====================== Dinky initialization script execution completed ====================== ${RESET}"
+
+
+set +x
\ No newline at end of file
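[Note] The repeated switch from "sh" to "source" in this file exists so that the set -x enabled by the new debug flag carries into the helper scripts: a child started with sh gets a fresh shell and loses the trace flag, while a sourced script runs in the current shell and keeps it. A tiny demonstration with a hypothetical file name:

    # Sketch: why sourcing keeps debug tracing while a child shell loses it.
    cat > /tmp/child_demo.sh <<'EOF'
    case $- in *x*) echo "child: set -x is active";; *) echo "child: set -x is NOT active";; esac
    EOF
    set -x                        # what "init_tools_main.sh true" turns on
    sh /tmp/child_demo.sh         # new shell: prints NOT active
    source /tmp/child_demo.sh     # current shell: prints active
    set +x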
diff --git a/script/bin/parse_yml.sh b/script/bin/parse_yml.sh
new file mode 100644
index 0000000000..63df089c35
--- /dev/null
+++ b/script/bin/parse_yml.sh
@@ -0,0 +1,63 @@
+#!/bin/bash
+
+# Check whether the number of parameters is correct
+if [ $# -ne 2 ]; then
+ echo "Please enter the parameters in the correct format, the format is: $0 yaml_file_path key"
+ exit 1
+fi
+
+
+yaml_file_path="$1"
+key="$2"
+# Split keys into arrays according to . (used to handle hierarchical keys)
+IFS='.' read -ra key_parts <<< "$key"
+
+# Used to store the finally found value
+value=""
+
+# Process the file first and remove comment lines (assuming the comment starts with #) and blank lines
+temp_file=$(mktemp)
+grep -Ev '^(#|$)' "$yaml_file_path" > "$temp_file"
+
+# Start looking for values by levels
+current_data=$(cat "$temp_file")
+for part in "${key_parts[@]}"; do
+ found=false
+ while IFS= read -r line; do
+
+ if [[ $line =~ ^$part: ]]; then
+ # If it is the last key, extract the value
+ if [ "$part" == "${key_parts[${#key_parts[@]}-1]}" ]; then
+ value=$(echo "$line" | sed 's/.*: //')
+ found=true
+ break
+ else
+ # If it is not the last key, get the data range of the next level
+ start_line_num=$(grep -n "$line" "$temp_file" | cut -d: -f1)
+ end_line_num=$(awk -v start="$start_line_num" '$0 ~ /^[a-zA-Z]/ && NR > start {print NR - 1; exit}' "$temp_file")
+ if [ -z "$end_line_num" ]; then
+ end_line_num=$(wc -l < "$temp_file")
+ fi
+
+ current_data=$(sed -n "$((start_line_num + 1)),$((end_line_num))p" "$temp_file")
+ current_data=$(echo "$current_data" | sed 's/^[[:space:]]*//')
+ found=true
+ break
+ fi
+ fi
+ done <<< "$current_data"
+ if [ "$found" = false ]; then
+ value=""
+ break
+ fi
+done
+
+# Delete temporary files
+rm -f "$temp_file"
+
+# Output the found value to standard output so callers can capture it
+if [ -n "$value" ]; then
+ echo "$value"
+else
+ echo ""
+fi
\ No newline at end of file
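[Note] parse_yml.sh is a line-oriented lookup, not a full YAML parser: it trims indentation and walks the file one key level at a time, so it handles simple dotted paths like server.port but not lists, anchors, or quoted keys. Usage matching the call added in auto.sh above (run from DINKY_HOME):

    # Usage example (same invocation auto.sh uses):
    ./bin/parse_yml.sh ./config/application.yml server.port
    # Prints the value (e.g. 8888) on stdout, or an empty line if the key is absent.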
From 3c84748f9d6f0f0a20635e0e4827a1d3e15aa902 Mon Sep 17 00:00:00 2001
From: Zzm0809 <934230207@qq.com>
Date: Mon, 9 Dec 2024 17:12:36 +0800
Subject: [PATCH 3/4] [Optimization] Modify SQLite data file location (#4032)
Co-authored-by: Zzm0809
---
.../src/main/java/org/dinky/utils/SqliteUtil.java | 12 ++++++++----
.../dinky/data/constant/MonitorTableConstant.java | 2 +-
2 files changed, 9 insertions(+), 5 deletions(-)
diff --git a/dinky-admin/src/main/java/org/dinky/utils/SqliteUtil.java b/dinky-admin/src/main/java/org/dinky/utils/SqliteUtil.java
index 328aeb71d1..4dfc221efb 100644
--- a/dinky-admin/src/main/java/org/dinky/utils/SqliteUtil.java
+++ b/dinky-admin/src/main/java/org/dinky/utils/SqliteUtil.java
@@ -19,6 +19,9 @@
package org.dinky.utils;
+import org.dinky.data.constant.DirConstant;
+import org.dinky.data.constant.MonitorTableConstant;
+
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
@@ -35,11 +38,12 @@ public enum SqliteUtil {
INSTANCE;
private Connection connection;
- private final AtomicLong lastRecyle = new AtomicLong(0);
+ private final AtomicLong lastRecycle = new AtomicLong(0);
static {
try {
- SqliteUtil.INSTANCE.connect("dinky.db");
+ SqliteUtil.INSTANCE.connect(
+ DirConstant.getTempRootDir() + DirConstant.FILE_SEPARATOR + MonitorTableConstant.DINKY_DB);
SqliteUtil.INSTANCE.recyleData();
} catch (SQLException e) {
throw new RuntimeException(e);
@@ -68,10 +72,10 @@ public void executeSql(String sql) throws SQLException {
public void recyleData() {
long now = System.currentTimeMillis();
- if (now - lastRecyle.get() < 1000 * 60 * 60) {
+ if (now - lastRecycle.get() < 1000 * 60 * 60) {
return;
}
- lastRecyle.set(now);
+ lastRecycle.set(now);
try {
String sql = "DELETE FROM dinky_metrics WHERE heart_time <= datetime('now', '-7 days')";
executeSql(sql);
diff --git a/dinky-common/src/main/java/org/dinky/data/constant/MonitorTableConstant.java b/dinky-common/src/main/java/org/dinky/data/constant/MonitorTableConstant.java
index 5a5e3507e9..c46f6525ec 100644
--- a/dinky-common/src/main/java/org/dinky/data/constant/MonitorTableConstant.java
+++ b/dinky-common/src/main/java/org/dinky/data/constant/MonitorTableConstant.java
@@ -20,7 +20,7 @@
package org.dinky.data.constant;
public final class MonitorTableConstant {
- public static final String DINKY_DB = "dinky_db";
+ public static final String DINKY_DB = "dinky.db";
public static final String DINKY_METRICS = "dinky_metrics";
public static final String HEART_TIME = "heart_time";
public static final String JOB_ID = "job_id";
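[Note] With this change the SQLite file moves from the working directory to DirConstant.getTempRootDir() and is named by MonitorTableConstant.DINKY_DB ("dinky.db"); the hourly recycle keeps only seven days of dinky_metrics rows. The same retention query can be run by hand with the sqlite3 CLI; the path below is a placeholder since getTempRootDir() is resolved at runtime:

    # Sketch: inspect the relocated metrics database with the sqlite3 CLI.
    DB=/path/to/dinky-temp/dinky.db   # placeholder for DirConstant.getTempRootDir()
    sqlite3 "$DB" "SELECT COUNT(*) FROM dinky_metrics;"
    # The same cleanup the patched recycleData() runs at most once per hour:
    sqlite3 "$DB" "DELETE FROM dinky_metrics WHERE heart_time <= datetime('now', '-7 days');"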
From 4a2dfdeb65cf5b717cbcecab8a48da9e9e17c424 Mon Sep 17 00:00:00 2001
From: "ze.miao"
Date: Mon, 9 Dec 2024 17:15:30 +0800
Subject: [PATCH 4/4] [BugFix] Fix issue 3470: YARN web UI fails to obtain
 task status when submitting a Flink task after enabling Kerberos
 authentication. (#4020)
Co-authored-by: ze.miao
Co-authored-by: GH Action - Upstream Sync
---
.../utils/RequestKerberosUrlUtils.java | 160 ++++++++++++++++++
.../org/dinky/gateway/yarn/YarnGateway.java | 37 ++++
2 files changed, 197 insertions(+)
create mode 100644 dinky-gateway/src/main/java/org/dinky/gateway/utils/RequestKerberosUrlUtils.java
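[Note] In effect, the new utility issues an HTTP request to the YARN web UI with SPNEGO (Kerberos) negotiation using a keytab login. The equivalent check can be done from a shell to validate the keytab and principal before debugging the Java path; a sketch, assuming curl was built with GSS-API/Kerberos support (host, keytab path, and principal are placeholders):

    # Sketch: the same SPNEGO request from the command line.
    kinit -kt /etc/security/keytabs/dinky.keytab dinky/host@EXAMPLE.COM
    curl --negotiate -u : -s -o /dev/null -w '%{http_code}\n' \
         "http://resourcemanager-host:8088/ws/v1/cluster/apps"
    # 200 means the Kerberos ticket and SPNEGO negotiation work against the RM web UI.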
diff --git a/dinky-gateway/src/main/java/org/dinky/gateway/utils/RequestKerberosUrlUtils.java b/dinky-gateway/src/main/java/org/dinky/gateway/utils/RequestKerberosUrlUtils.java
new file mode 100644
index 0000000000..4157c6688b
--- /dev/null
+++ b/dinky-gateway/src/main/java/org/dinky/gateway/utils/RequestKerberosUrlUtils.java
@@ -0,0 +1,160 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.dinky.gateway.utils;
+
+import org.apache.http.HttpResponse;
+import org.apache.http.auth.AuthSchemeProvider;
+import org.apache.http.auth.AuthScope;
+import org.apache.http.auth.Credentials;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.config.AuthSchemes;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.config.Lookup;
+import org.apache.http.config.RegistryBuilder;
+import org.apache.http.impl.auth.SPNegoSchemeFactory;
+import org.apache.http.impl.client.BasicCredentialsProvider;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClientBuilder;
+
+import java.io.IOException;
+import java.security.Principal;
+import java.security.PrivilegedAction;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.security.auth.Subject;
+import javax.security.auth.kerberos.KerberosPrincipal;
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.Configuration;
+import javax.security.auth.login.LoginContext;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class RequestKerberosUrlUtils {
+ public static Logger logger = LoggerFactory.getLogger(RequestKerberosUrlUtils.class);
+ private String principal;
+ private String keyTabLocation;
+
+ public RequestKerberosUrlUtils() {}
+
+ public RequestKerberosUrlUtils(String principal, String keyTabLocation) {
+ this.principal = principal;
+ this.keyTabLocation = keyTabLocation;
+ }
+
+ public RequestKerberosUrlUtils(String principal, String keyTabLocation, boolean isDebug) {
+ this(principal, keyTabLocation);
+ if (isDebug) {
+ System.setProperty("sun.security.spnego.debug", "true");
+ System.setProperty("sun.security.krb5.debug", "true");
+ }
+ }
+
+ public RequestKerberosUrlUtils(String principal, String keyTabLocation, String krb5Location, boolean isDebug) {
+ this(principal, keyTabLocation, isDebug);
+ // System.setProperty("java.security.krb5.conf", krb5Location);
+ }
+
+ private static HttpClient buildSpengoHttpClient() {
+
+ Lookup authSchemeRegistry = RegistryBuilder.create()
+ .register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory(true))
+ .build();
+
+ BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider();
+ credentialsProvider.setCredentials(new AuthScope(null, -1, null), new Credentials() {
+ @Override
+ public Principal getUserPrincipal() {
+ return null;
+ }
+
+ @Override
+ public String getPassword() {
+ return null;
+ }
+ });
+
+ CloseableHttpClient httpClient = HttpClientBuilder.create()
+ .setDefaultAuthSchemeRegistry(authSchemeRegistry)
+ .setDefaultCredentialsProvider(credentialsProvider)
+ .build();
+ return httpClient;
+ }
+
+ public HttpResponse callRestUrl(final String url, final String userId) {
+ // logger.warn(String.format("Calling KerberosHttpClient %s %s %s", this.principal, this.keyTabLocation,
+ // url));
+ Configuration config = new Configuration() {
+ @Override
+ public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
+ HashMap options = new HashMap() {
+ {
+ put("useTicketCache", "false");
+ put("useKeyTab", "true");
+ put("keyTab", keyTabLocation);
+ // Krb5 in GSS API needs to be refreshed so it does not throw the error
+ // Specified version of key is not available
+ put("refreshKrb5Config", "true");
+ put("principal", principal);
+ put("storeKey", "true");
+ put("doNotPrompt", "true");
+ put("isInitiator", "true");
+ put("debug", "true");
+ }
+ };
+ return new AppConfigurationEntry[] {
+ new AppConfigurationEntry(
+ "com.sun.security.auth.module.Krb5LoginModule",
+ AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+ options)
+ };
+ }
+ };
+ Set princ = new HashSet(1);
+ princ.add(new KerberosPrincipal(userId));
+ Subject sub = new Subject(false, princ, new HashSet