forked from DataLinkDC/dinky
Showing 5 changed files with 132 additions and 170 deletions.
@@ -1,111 +1,104 @@
#!/bin/bash

# Read the positional parameters into variables
CURRENT_FLINK_FULL_VERSION=$1
FLINK_VERSION_SCAN=$2
DINKY_TMP_DIR=$3
EXTENDS_HOME=$4
DINKY_HOME=$5

echo -e "${GREEN}====================== Flink 依赖初始化 ======================${RESET}"
echo -e "${GREEN}====================== Flink dependency initialization ======================${RESET}"

echo -e "${BLUE}参数: 当前Flink版本为:${CURRENT_FLINK_FULL_VERSION},扫描的Flink版本为:${FLINK_VERSION_SCAN},临时目录为:${DINKY_TMP_DIR},扩展包目录为:${EXTENDS_HOME},Dinky 根目录为:${DINKY_HOME}${RESET}"
echo -e "${BLUE}Parameters: current Flink version: ${CURRENT_FLINK_FULL_VERSION}, scanned Flink version: ${FLINK_VERSION_SCAN}, temporary directory: ${DINKY_TMP_DIR}, extension package directory: ${EXTENDS_HOME}, Dinky root directory: ${DINKY_HOME}${RESET}"

# Validate parameters
if [ -z "$CURRENT_FLINK_FULL_VERSION" ] || [ -z "$FLINK_VERSION_SCAN" ] || [ -z "$DINKY_TMP_DIR" ] || [ -z "$EXTENDS_HOME" ] || [ -z "$DINKY_HOME" ]; then
    echo -e "${RED}参数错误,请检查!${RESET}"
    echo -e "${RED}Parameter error, please check!${RESET}"
    exit 1
fi

# Check whether flink-${CURRENT_FLINK_FULL_VERSION}-bin-scala_2.12.tgz already exists
if [ -f "$DINKY_TMP_DIR/flink-${CURRENT_FLINK_FULL_VERSION}-bin-scala_2.12.tgz" ]; then
    echo -e "${YELLOW}$DINKY_TMP_DIR 下已存在 flink-${CURRENT_FLINK_FULL_VERSION}-bin-scala_2.12.tgz 文件,为了确保完整性,先删除 ${DINKY_TMP_DIR}/flink-${CURRENT_FLINK_FULL_VERSION}-bin-scala_2.12.tgz 文件后重新下载${RESET}"
    echo -e "${YELLOW}${DINKY_TMP_DIR}/flink-${CURRENT_FLINK_FULL_VERSION}-bin-scala_2.12.tgz already exists; to ensure integrity, it will be deleted and downloaded again${RESET}"
    rm -rf ${DINKY_TMP_DIR}/flink-${CURRENT_FLINK_FULL_VERSION}-bin-scala_2.12.tgz
    # If a previously extracted flink directory exists, delete it as well
    if [ -d "$DINKY_TMP_DIR/flink-${CURRENT_FLINK_FULL_VERSION}" ]; then
        echo -e "${YELLOW}已存在 flink 目录,删除 $DINKY_TMP_DIR/flink-${CURRENT_FLINK_FULL_VERSION}${RESET}"
        echo -e "${YELLOW}The extracted flink directory already exists, deleting $DINKY_TMP_DIR/flink-${CURRENT_FLINK_FULL_VERSION}${RESET}"
        rm -rf $DINKY_TMP_DIR/flink-${CURRENT_FLINK_FULL_VERSION}
    fi
fi

# Try the Tsinghua University mirror first, then fall back to the Apache archive
try_tsinghua_mirror() {
    local tsinghua_url="https://mirrors.tuna.tsinghua.edu.cn/apache/flink/flink-${CURRENT_FLINK_FULL_VERSION}/flink-${CURRENT_FLINK_FULL_VERSION}-bin-scala_2.12.tgz"
    local apache_url="https://archive.apache.org/dist/flink/flink-${CURRENT_FLINK_FULL_VERSION}/flink-${CURRENT_FLINK_FULL_VERSION}-bin-scala_2.12.tgz"

    echo -e "${GREEN}开始下载 Flink-${FLINK_VERSION_SCAN} 安装包... 存放至 ${DINKY_TMP_DIR} 目录下${RESET}"
    echo -e "${GREEN}Start downloading the Flink-${FLINK_VERSION_SCAN} distribution... It will be stored in the ${DINKY_TMP_DIR} directory${RESET}"
    if download_file "$tsinghua_url" "$DINKY_TMP_DIR"; then
        echo -e "${BLUE}当前下载的Flink安装包地址为:${tsinghua_url}${RESET}"
        echo -e "${BLUE}The Flink distribution was downloaded from: ${tsinghua_url}${RESET}"
        return 0
    else
        echo -e "${YELLOW}清华大学镜像中未找到文件,尝试从 Apache 官方源下载...${RESET}"
        echo -e "${YELLOW}File not found on the Tsinghua University mirror, trying the official Apache source...${RESET}"
        if download_file "$apache_url" "$DINKY_TMP_DIR"; then
            echo -e "${BLUE}当前下载的Flink安装包地址为:${apache_url}${RESET}"
            echo -e "${BLUE}The Flink distribution was downloaded from: ${apache_url}${RESET}"
            return 0
        else
            echo -e "${RED}从 Apache 官方源下载也失败了,请检查网络或手动下载。${RESET}"
            echo -e "${RED}Downloading from the official Apache source also failed, please check the network or download manually.${RESET}"
            return 1
        fi
    fi
}

# Invoke the download function; abort with a non-zero exit code if both sources fail
if ! try_tsinghua_mirror; then
    exit 1
fi

echo -e "${GREEN}Flink安装包下载完成。${RESET}"
echo -e "${GREEN}Flink distribution download completed.${RESET}"
echo -e "\n${GREEN}===============================================================${RESET}\n"
echo -e "${GREEN}开始解压Flink安装包...${RESET}"
echo -e "${GREEN}Start extracting the Flink distribution...${RESET}"
tar -zxvf ${DINKY_TMP_DIR}/flink-${CURRENT_FLINK_FULL_VERSION}-bin-scala_2.12.tgz -C ${DINKY_TMP_DIR}/
if [ $? -eq 0 ]; then
    echo -e "${GREEN}Flink安装包解压完成。${RESET}"
    echo -e "${GREEN}Flink distribution extraction completed.${RESET}"
else
    echo -e "${RED}Flink安装包解压失败,请检查。${RESET}"
    echo -e "${RED}Flink distribution extraction failed, please check.${RESET}"
    exit 1
fi

echo -e "\n${GREEN}===============================================================${RESET}\n"

# Determine the name of the extracted directory starting with "flink"
flink_dir_tmp=$(ls -n ${DINKY_TMP_DIR} | grep '^d' | grep flink | awk '{print $9}')
full_flink_dir_tmp="${DINKY_TMP_DIR}/${flink_dir_tmp}"
echo -e "${BLUE}解压后的目录名称:${full_flink_dir_tmp}${RESET}"
echo -e "${BLUE}Extracted directory name: ${full_flink_dir_tmp}${RESET}"

echo -e "${GREEN}处理 ${full_flink_dir_tmp}/lib/flink-table-planner-loader* 文件...${RESET}"
echo -e "${GREEN}Processing ${full_flink_dir_tmp}/lib/flink-table-planner-loader* ...${RESET}"
rm -rf ${full_flink_dir_tmp}/lib/flink-table-planner-loader*
echo -e "${GREEN}处理完成。${RESET}"
echo -e "${GREEN}Processing completed.${RESET}"

echo -e "${GREEN}处理 ${full_flink_dir_tmp}/opt/flink-table-planner_2.12-*.jar 文件...${RESET}"
echo -e "${GREEN}Processing ${full_flink_dir_tmp}/opt/flink-table-planner_2.12-*.jar ...${RESET}"
mv ${full_flink_dir_tmp}/opt/flink-table-planner_2.12-*.jar ${full_flink_dir_tmp}/lib/
echo -e "${GREEN}处理完成。${RESET}"
echo -e "${GREEN}Processing completed.${RESET}"

echo -e "${GREEN}处理 flink jar 依赖 到 dinky 中...${RESET}"
echo -e "${GREEN}Copying flink jar dependencies into dinky...${RESET}"
cp -r ${full_flink_dir_tmp}/lib/*.jar ${EXTENDS_HOME}/flink${FLINK_VERSION_SCAN}/
echo -e "${GREEN}jar 依赖处理完成。${RESET}"
echo -e "${GREEN}jar dependency processing completed.${RESET}"

echo -e "${GREEN}处理 flink-sql-client ...${RESET}"
echo -e "${GREEN}Processing flink-sql-client ...${RESET}"
cp -r ${full_flink_dir_tmp}/opt/flink-sql-client-*.jar ${EXTENDS_HOME}/flink${FLINK_VERSION_SCAN}/
echo -e "${GREEN}处理完成。${RESET}"
echo -e "${GREEN}Processing completed.${RESET}"

echo -e "${GREEN}处理 flink-cep-scala ...${RESET}"
echo -e "${GREEN}Processing flink-cep-scala ...${RESET}"
cp -r ${full_flink_dir_tmp}/opt/flink-cep-scala*.jar ${EXTENDS_HOME}/flink${FLINK_VERSION_SCAN}/
echo -e "${GREEN}处理完成。${RESET}"
echo -e "${GREEN}Processing completed.${RESET}"

echo -e "${GREEN}处理 flink-queryable-state-runtime ...${RESET}"
echo -e "${GREEN}Processing flink-queryable-state-runtime ...${RESET}"
cp -r ${full_flink_dir_tmp}/opt/flink-queryable-state-runtime*.jar ${EXTENDS_HOME}/flink${FLINK_VERSION_SCAN}/
echo -e "${GREEN}处理完成。${RESET}"
echo -e "${GREEN}Processing completed.${RESET}"

echo -e "${GREEN}处理 flink-state-processor-api ...${RESET}"
echo -e "${GREEN}Processing flink-state-processor-api ...${RESET}"
cp -r ${full_flink_dir_tmp}/opt/flink-state-processor-api*.jar ${EXTENDS_HOME}/flink${FLINK_VERSION_SCAN}/
echo -e "${GREEN}处理完成。${RESET}"
echo -e "${GREEN}Processing completed.${RESET}"

echo -e "${GREEN} ================= 列出 ${EXTENDS_HOME}/flink${FLINK_VERSION_SCAN}/ 目录下的文件 ==============${RESET}"
echo -e "${GREEN} ================= List files in the ${EXTENDS_HOME}/flink${FLINK_VERSION_SCAN}/ directory ==============${RESET}"
ls -l ${EXTENDS_HOME}/flink${FLINK_VERSION_SCAN}/
# Ask the user to verify the dependency files listed above
echo -e "${YELLOW}请核对以上依赖文件。${RESET}"
echo -e "${YELLOW}Please verify the dependency files listed above.${RESET}"

echo -e "${GREEN}基础依赖处理完成, 请根据实际情况进行后续操作。${RESET}"
echo -e "${GREEN}Basic dependency processing is completed, please perform subsequent operations according to your actual situation.${RESET}"
@@ -1,42 +1,35 @@
#!/bin/bash

# Read the positional parameter into a variable
EXTENDS_HOME=$1

echo -e "${GREEN}====================== Hadoop 依赖初始化 ======================${RESET}"
echo -e "${GREEN}====================== Hadoop dependency initialization ======================${RESET}"
echo -e "${BLUE}参数: 扩展包目录为:${EXTENDS_HOME}${RESET}"

# Ask which Hadoop-uber version to download (a single bilingual prompt)
read -p "请选择要下载的 Hadoop-uber 版本 / Please select the Hadoop-uber version to download (enter 2 or 3): " hadoop_uber_version
hadoop_uber_version=$(echo "$hadoop_uber_version" | tr '[:upper:]' '[:lower:]' | tr -d '[:space:]')

# Download the Hadoop-uber package matching the selected version
case $hadoop_uber_version in
    2)
        # Skip the download if the Hadoop-uber 2 package is already present
        if [ -f "$EXTENDS_HOME/flink-shaded-hadoop-2-uber-2.8.3-10.0.jar" ]; then
            echo -e "${YELLOW}已存在 flink-shaded-hadoop-2-uber-2.8.3-10.0.jar 文件,无需重复下载。${RESET}"
            echo -e "${YELLOW}The flink-shaded-hadoop-2-uber-2.8.3-10.0.jar file already exists, no need to download it again.${RESET}"
        else
            echo -e "${YELLOW}开始下载 Hadoop-uber 2 版本包...${RESET}"
            echo -e "${YELLOW}Start downloading the Hadoop-uber 2 package...${RESET}"
            download_url="https://repo1.maven.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-10.0/flink-shaded-hadoop-2-uber-2.8.3-10.0.jar"
            download_file "$download_url" "$EXTENDS_HOME"
        fi
        ;;
    3)
        # Skip the download if the Hadoop-uber 3 package is already present
        if [ -f "$EXTENDS_HOME/flink-shaded-hadoop-3-uber-3.1.1.7.2.9.0-173-9.0.jar" ]; then
            echo -e "${YELLOW}已存在 flink-shaded-hadoop-3-uber-3.1.1.7.2.9.0-173-9.0.jar 文件,无需重复下载。${RESET}"
            echo -e "${YELLOW}The flink-shaded-hadoop-3-uber-3.1.1.7.2.9.0-173-9.0.jar file already exists, no need to download it again.${RESET}"
        else
            echo -e "${YELLOW}开始下载 Hadoop-uber 3 版本包...${RESET}"
            echo -e "${YELLOW}Start downloading the Hadoop-uber 3 package...${RESET}"
            download_url="https://repository.cloudera.com/artifactory/cloudera-repos/org/apache/flink/flink-shaded-hadoop-3-uber/3.1.1.7.2.9.0-173-9.0/flink-shaded-hadoop-3-uber-3.1.1.7.2.9.0-173-9.0.jar"
            download_file "$download_url" "$EXTENDS_HOME"
        fi
        ;;
    *)
        echo -e "${RED}输入的版本号不正确,请重新运行脚本选择正确的版本。${RESET}"
        echo -e "${RED}The entered version number is invalid, please re-run the script and select a correct version.${RESET}"
        exit 1
        ;;
esac

echo -e "${GREEN}下载完成,可按需进行后续安装配置操作。${RESET}"
echo -e "${GREEN}Download completed; subsequent installation and configuration can be performed as needed.${RESET}"