Skip to content

Commit

Permalink
fix: optimize MysqlServerStateManager and resolve conflicts in metric plugins
Browse files Browse the repository at this point in the history
  • Loading branch information
GSHF committed Dec 30, 2024
1 parent d79ab68 commit 08ddbbb
Show file tree
Hide file tree
Showing 10 changed files with 65 additions and 151 deletions.

This file was deleted.

Original file line number Diff line number Diff line change
@@ -1,8 +1,4 @@
# Maps each engine's daily-average expected-value metric key to its plugin class.
# Livy runs on Spark, so it reuses the Spark implementation; Flink reuses the local one.
local_daily_avg=io.datavines.metric.expected.plugin.DailyAvg
spark_daily_avg=io.datavines.metric.expected.plugin.SparkDailyAvg
livy_daily_avg=io.datavines.metric.expected.plugin.SparkDailyAvg
flink_daily_avg=io.datavines.metric.expected.plugin.DailyAvg
Original file line number Diff line number Diff line change
@@ -1,8 +1,4 @@
# Maps each engine's fixed-value expected-value metric key to its plugin class.
# FixValue is engine-agnostic, so all engines share the same implementation.
local_fix_value=io.datavines.metric.expected.plugin.FixValue
spark_fix_value=io.datavines.metric.expected.plugin.FixValue
livy_fix_value=io.datavines.metric.expected.plugin.FixValue
flink_fix_value=io.datavines.metric.expected.plugin.FixValue
Original file line number Diff line number Diff line change
@@ -1,8 +1,4 @@
# Maps each engine's 30-day-average expected-value metric key to its plugin class.
# Livy runs on Spark, so it reuses the Spark implementation; Flink reuses the local one.
local_last_30d_avg=io.datavines.metric.expected.plugin.Last30DayAvg
spark_last_30d_avg=io.datavines.metric.expected.plugin.SparkLast30DayAvg
livy_last_30d_avg=io.datavines.metric.expected.plugin.SparkLast30DayAvg
flink_last_30d_avg=io.datavines.metric.expected.plugin.Last30DayAvg
Original file line number Diff line number Diff line change
@@ -1,8 +1,4 @@
# Maps each engine's 7-day-average expected-value metric key to its plugin class.
# Livy runs on Spark, so it reuses the Spark implementation; Flink reuses the local one.
local_last_7d_avg=io.datavines.metric.expected.plugin.Last7DayAvg
spark_last_7d_avg=io.datavines.metric.expected.plugin.SparkLast7DayAvg
livy_last_7d_avg=io.datavines.metric.expected.plugin.SparkLast7DayAvg
flink_last_7d_avg=io.datavines.metric.expected.plugin.Last7DayAvg
Original file line number Diff line number Diff line change
@@ -1,8 +1,4 @@
# Maps each engine's monthly-average expected-value metric key to its plugin class.
# Livy runs on Spark, so it reuses the Spark implementation; Flink reuses the local one.
local_monthly_avg=io.datavines.metric.expected.plugin.MonthlyAvg
spark_monthly_avg=io.datavines.metric.expected.plugin.SparkMonthlyAvg
livy_monthly_avg=io.datavines.metric.expected.plugin.SparkMonthlyAvg
flink_monthly_avg=io.datavines.metric.expected.plugin.MonthlyAvg
Original file line number Diff line number Diff line change
Expand Up @@ -176,22 +176,25 @@ private boolean isExists(ServerInfo serverInfo) throws SQLException {

/**
 * Loads every row of the dv_server table into a map keyed by "host:port".
 *
 * @return a map of server address ("host:port") to its {@link ServerInfo};
 *         never null (an empty table yields an empty map)
 * @throws SQLException if the query fails or the connection cannot be validated
 */
private ConcurrentHashMap<String, ServerInfo> fetchServers() throws SQLException {
    checkConnection();
    // try-with-resources guarantees the statement and result set are closed
    // even when an SQLException is thrown mid-iteration (the original version
    // leaked both on any error between executeQuery() and the manual close()).
    try (PreparedStatement preparedStatement = connection.prepareStatement("select * from dv_server");
         ResultSet resultSet = preparedStatement.executeQuery()) {
        ConcurrentHashMap<String, ServerInfo> map = new ConcurrentHashMap<>();
        while (resultSet.next()) {
            String host = resultSet.getString("host");
            int port = resultSet.getInt("port");
            Timestamp updateTime = resultSet.getTimestamp("update_time");
            Timestamp createTime = resultSet.getTimestamp("create_time");
            map.put(host + ":" + port, new ServerInfo(host, port, createTime, updateTime));
        }
        return map;
    }
}

public List<ServerInfo> getActiveServerList() {
Expand All @@ -201,6 +204,7 @@ public List<ServerInfo> getActiveServerList() {
if (values.length == 2) {
activeServerList.add(v);
}

});
return activeServerList;
}
Expand All @@ -209,26 +213,32 @@ class HeartBeater implements Runnable {

@Override
public void run() {
try {
if (Stopper.isRunning()) {
executeUpdate(serverInfo);
if (Stopper.isRunning()) {
try {
if (isExists(serverInfo)) {
executeUpdate(serverInfo);
} else {
executeInsert(serverInfo);
}
} catch (SQLException e) {
log.error("heartbeat error", e);
}
} catch (SQLException e) {
log.error("heartbeat error", e);
}
}
}


class ServerChecker implements Runnable {

@Override
public void run() {
try {
if (Stopper.isRunning()) {

if (Stopper.isRunning()) {
try {
refreshServer();
} catch (SQLException e) {
log.error("server check error", e);
}
} catch (SQLException e) {
log.error("server check error", e);
}
}
}
Expand All @@ -240,8 +250,8 @@ private void checkConnection() throws SQLException {
}

public void close() throws SQLException {
if (connection != null) {
if (connection != null && !connection.isClosed()) {
connection.close();
}
}
}
}
25 changes: 7 additions & 18 deletions datavines-server/src/main/resources/application.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -21,10 +21,10 @@ spring:
application:
name: datavines-server
datasource:
  # Default profile targets PostgreSQL (MySQL is configured under the 'mysql' profile).
  driver-class-name: org.postgresql.Driver
  url: jdbc:postgresql://127.0.0.1:5432/datavines
  username: postgres
  # NOTE(review): placeholder credential for local development — externalize for production.
  password: 123456
hikari:
connection-test-query: select 1
minimum-idle: 5
Expand Down Expand Up @@ -55,7 +55,7 @@ spring:
org.quartz.jobStore.misfireThreshold: 60000
org.quartz.scheduler.batchTriggerAcquisitionMaxCount: 1
org.quartz.scheduler.makeSchedulerThreadDaemon: true
# Must match the default datasource dialect (PostgreSQL); the mysql profile overrides it.
org.quartz.jobStore.driverDelegateClass: org.quartz.impl.jdbcjobstore.PostgreSQLDelegate
org.quartz.jobStore.clusterCheckinInterval: 5000
mvc:
pathmatch:
Expand Down Expand Up @@ -87,20 +87,9 @@ spring:
on-profile: mysql
datasource:
  driver-class-name: com.mysql.cj.jdbc.Driver
  # useSSL=false avoids SSL handshake warnings against local MySQL instances.
  url: jdbc:mysql://127.0.0.1:3306/datavines?useUnicode=true&characterEncoding=UTF-8&useSSL=false&serverTimezone=Asia/Shanghai
  username: root
  # NOTE(review): placeholder credential for local development — externalize for production.
  password: 123456
quartz:
properties:
org.quartz.jobStore.driverDelegateClass: org.quartz.impl.jdbcjobstore.StdJDBCDelegate
4 changes: 0 additions & 4 deletions deploy/compose/docker-compose.yaml
Original file line number Diff line number Diff line change
@@ -1,11 +1,7 @@
version: '3.8'
services:
  datavines:
    # dev tag matches the upstream/dev branch this commit merges.
    image: datavines:dev
    container_name: datavines
    ports:
      - 5600:5600
Expand Down
69 changes: 26 additions & 43 deletions deploy/docker/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,61 +1,44 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

FROM openjdk:8

MAINTAINER 735140144

ARG VERSION=1.0.0-SNAPSHOT
ARG FLINK_VERSION=1.13.6
ARG SCALA_VERSION=2.11

ENV TZ=Asia/Shanghai \
LANG=zh_CN.UTF-8 \
FLINK_HOME=/opt/flink \
PATH=$PATH:/opt/flink/bin
ENV TZ=Asia/Shanghai
ENV LANG=zh_CN.UTF-8

WORKDIR /opt

# Install required packages
RUN apt-get update && \
apt-get install -y tini wget sudo && \
apt-get install -y tini && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*

# Download and setup Flink for engine support
RUN wget --no-check-certificate https://archive.apache.org/dist/flink/flink-${FLINK_VERSION}/flink-${FLINK_VERSION}-bin-scala_${SCALA_VERSION}.tgz && \
tar -xzf flink-${FLINK_VERSION}-bin-scala_${SCALA_VERSION}.tgz && \
mv flink-${FLINK_VERSION} flink && \
rm flink-${FLINK_VERSION}-bin-scala_${SCALA_VERSION}.tgz && \
mkdir -p $FLINK_HOME/logs

# Copy and extract datavines
COPY datavines-dist/target/datavines-${VERSION}-bin.tar.gz .

RUN tar -zxvf datavines-${VERSION}-bin.tar.gz && \
mv datavines-${VERSION}-bin datavines && \
rm -rf datavines-${VERSION}-bin.tar.gz

# Fix script permissions and line endings
RUN chmod +x datavines/bin/datavines-daemon.sh && \
sed -i 's/\r//g' datavines/bin/datavines-daemon.sh && \
# Copy Flink dependencies
cp datavines/lib/datavines-flink-core*.jar $FLINK_HOME/lib/ && \
# Create necessary directories with proper permissions
mkdir -p /tmp/datavines/exec/job/flink && \
chmod -R 777 /tmp/datavines

# Set up minimal Flink configuration
RUN echo "jobmanager.memory.process.size: 1600m" >> $FLINK_HOME/conf/flink-conf.yaml && \
echo "taskmanager.memory.process.size: 1728m" >> $FLINK_HOME/conf/flink-conf.yaml && \
echo "taskmanager.numberOfTaskSlots: 4" >> $FLINK_HOME/conf/flink-conf.yaml && \
echo "parallelism.default: 2" >> $FLINK_HOME/conf/flink-conf.yaml

# Create datavines user and set permissions
RUN useradd -m -s /bin/bash datavines && \
echo "datavines ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers && \
chown -R datavines:datavines /opt/datavines /opt/flink /tmp/datavines

USER datavines

# Expose Datavines port (primary) and Flink ports (auxiliary)
EXPOSE 5600 8081 6123

# Use tini as init process
CMD ["/usr/bin/tini", "--", "datavines/bin/datavines-daemon.sh", "start_container", ""]
RUN chmod +x datavines/bin/datavines-daemon.sh && sed -i 's/\r//g' datavines/bin/datavines-daemon.sh

EXPOSE 5600

CMD ["/usr/bin/tini", "--", "datavines/bin/datavines-daemon.sh", "start_container", ""]

0 comments on commit 08ddbbb

Please sign in to comment.