This commit is contained in:
Jane
2023-12-22 10:59:10 +08:00
parent 751c43e199
commit d1ede2d4aa
2774 changed files with 291509 additions and 0 deletions

View File

@@ -0,0 +1,202 @@
#!/bin/bash
DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
SHELL_LOG="${DIR}/console.out"
SERVER_NAME="service-data-dts"
USER=`whoami`
SAFE_MODE=true
SUDO_USER=false
ENV_FILE_PATH="${DIR}/env.properties"
# Print the command-line usage table for this configure script.
usage(){
local fmt="\t%-10s %-10s %-2s \n"
printf "Configure usage:\n"
printf "${fmt}" --server "server-name" "Name of service-data-dts server"
printf "${fmt}" --unsafe "unsafe mode" "Will clean the directory existed"
printf "${fmt}" --safe "safe mode" "Will not modify the directory existed (Default)"
printf "${fmt}" "-h|--help" "usage" "List help document"
}
# Append a timestamped "[LEVEL] (pid) message" line to stdout and ${SHELL_LOG}.
# $1 = level (e.g. INFO), $2 = message text.
LOG(){
local ts
ts=$(date "+%Y-%m-%d %H:%M:%S.%3N")
echo -e "${ts} [${1}] ($$) ${2}" | tee -a ${SHELL_LOG}
}
# Ask a yes/no question until a valid answer is given.
# $1 = prompt text. Returns 0 for Y/y, 1 for N/n.
interact_echo(){
while true; do
read -p "$1 (Y/N)" yn
if [ "${yn}x" == "Yx" ] || [ "${yn}x" == "yx" ]; then
return 0
elif [ "${yn}x" == "Nx" ] || [ "${yn}x" == "nx" ]; then
return 1
else
# Fixed user-facing typo: "choise" -> "choice".
echo "Unknown choice: [$yn], please choose again."
fi
done
}
# Return 0 when the current user can use sudo (cached credentials),
# non-zero otherwise; all sudo output is suppressed.
is_sudo_user(){
if sudo -v >/dev/null 2>&1; then
return 0
else
return 1
fi
}
# Resolve and print the directory containing this script, following symlinks.
# NOTE: invoked via command substitution (BIN=`abs_path`), so the DIR/SOURCE
# variables it sets live in a subshell and do not leak into the caller.
abs_path(){
SOURCE="${BASH_SOURCE[0]}"
# Walk the symlink chain until SOURCE names a real file.
while [ -h "${SOURCE}" ]; do
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$(readlink "${SOURCE}")"
# A relative link target is resolved against the link's own directory.
[[ ${SOURCE} != /* ]] && SOURCE="${DIR}/${SOURCE}"
done
echo "$( cd -P "$( dirname "${SOURCE}" )" && pwd )"
}
# Warn when a path already exists.
# $1 = path to test, $2 = "true" to abort the whole configure run on hit.
check_exist(){
if test -e "$1"; then
# Fixed message grammar ("has been exist" -> "already exists").
LOG INFO "Directory or file: [$1] already exists"
# Quoted so an empty/missing $2 cannot break the test.
if [ "$2" == true ]; then
LOG INFO "Configure program will shutdown..."
exit 0
fi
fi
}
# Install ${DIR}/../conf/$1 into ${CONF_PATH}, replacing any existing copy.
# In safe mode an existing target aborts the run via check_exist.
copy_replace(){
local file_name="$1"
if test -e "${CONF_PATH}/${file_name}";then
if [ "${SAFE_MODE}" == true ]; then
check_exist "${CONF_PATH}/${file_name}" true
fi
LOG INFO "Delete file or directory: [${CONF_PATH}/${file_name}]"
# Quoted: an unquoted path here made `rm -rf` unsafe for paths with spaces.
rm -rf "${CONF_PATH}/${file_name}"
fi
if test -e "${DIR}/../conf/${file_name}";then
LOG INFO "Copy from ${DIR}/../conf/${file_name}"
cp -R "${DIR}/../conf/${file_name}" "${CONF_PATH}/"
fi
}
# Create directory $1 (with sudo when available), chown'd to ${USER}.
# In safe mode an existing path is logged (but does not abort: arg2=false).
mkdir_p(){
if [ "${SAFE_MODE}" == true ]; then
check_exist "$1" false
fi
if [ ! -d "$1" ]; then
LOG INFO "Creating directory: ["$1"]."
if [ "${SUDO_USER}" == true ]; then
sudo mkdir -p "$1" && sudo chown -R "${USER}" "$1"
else
mkdir -p "$1"
fi
fi
}
# ---- argument parsing --------------------------------------------------
# ${!OPTIND} indirection reads the current first positional parameter
# (OPTIND defaults to 1), so each `shift` advances the scan.
while [ 1 ]; do
case ${!OPTIND} in
--server)
# Value is the token following the flag.
SERVER_NAME=$2
shift 2
;;
--unsafe)
SAFE_MODE=false
shift 1
;;
--safe)
SAFE_MODE=true
shift 1
;;
--help|-h)
usage
exit 0
;;
*)
# First unrecognized token ends option parsing.
break
;;
esac
done
# Detect sudo capability once; mkdir_p consults SUDO_USER later.
is_sudo_user
if [ $? == 0 ]; then
SUDO_USER=true
fi
BIN=`abs_path`
# NOTE(review): the pattern "dts-servie-" looks misspelled and never matches
# the default SERVER_NAME "service-data-dts" -- confirm the intended prefix.
SERVER_NAME_SIMPLE=${SERVER_NAME/dts-servie-/}
# Derive log/conf/data paths; BASE_*_DIR overrides also patch env.properties
# in place via sed so later runs pick up the same locations.
LOG_PATH=${BIN}/../logs
if [ "x${BASE_LOG_DIR}" != "x" ]; then
LOG_PATH=${BASE_LOG_DIR}/${SERVER_NAME_SIMPLE}
sed -ri "s![#]?(WEB_LOG_PATH=)\S*!\1${LOG_PATH}!g" ${ENV_FILE_PATH}
fi
CONF_PATH=${BIN}/../conf
if [ "x${BASE_CONF_DIR}" != "x" ]; then
CONF_PATH=${BASE_CONF_DIR}/${SERVER_NAME_SIMPLE}
sed -ri "s![#]?(WEB_CONF_PATH=)\S*!\1${CONF_PATH}!g" ${ENV_FILE_PATH}
fi
DATA_PATH=${BIN}/../data
if [ "x${BASE_DATA_DIR}" != "x" ]; then
DATA_PATH=${BASE_DATA_DIR}/${SERVER_NAME_SIMPLE}
sed -ri "s![#]?(DATA_PATH=)\S*!\1${DATA_PATH}!g" ${ENV_FILE_PATH}
fi
echo "Start to make directory"
# Start to make directory
LOG INFO "\033[1m Start to build directory\033[0m"
mkdir_p ${LOG_PATH}
mkdir_p ${CONF_PATH}
mkdir_p ${DATA_PATH}
# Configuration templates are only copied when an external conf dir is used.
if [ "x${BASE_CONF_DIR}" != "x" ]; then
LOG INFO "\033[1m Start to copy configuration file/directory\033[0m"
# Copy the configuration file
copy_replace bootstrap.properties
copy_replace application.yml
copy_replace logback.xml
copy_replace i18n
copy_replace mybatis-mapper
copy_replace static
fi
echo "end to make directory"
BOOTSTRAP_PROP_FILE="${CONF_PATH}/bootstrap.properties"
# ---- optional interactive database initialization ----------------------
# (The "initalize" spelling below is inside user-facing strings; left as-is.)
# Start to initalize database
echo "Start to initalize database"
if [ "x${SQL_SOURCE_PATH}" != "x" ] && [ -f "${SQL_SOURCE_PATH}" ]; then
`mysql --version >/dev/null 2>&1`
if [ $? == 0 ]; then
LOG INFO "\033[1m Scan out mysql command, so begin to initalize the database\033[0m"
interact_echo "Do you want to initalize database with sql: [${SQL_SOURCE_PATH}]?"
if [ $? == 0 ]; then
read -p "Please input the db host(default: 127.0.0.1): " HOST
if [ "x${HOST}" == "x" ]; then
HOST="127.0.0.1"
fi
while [ 1 ]; do
read -p "Please input the db port(default: 3306): " PORT
if [ "x${PORT}" == "x" ]; then
PORT=3306
break
elif [ ${PORT} -gt 0 ] 2>/dev/null; then
# Numeric check: -gt on a non-number errors (stderr discarded).
break
else
echo "${PORT} is not a number, please input again"
fi
done
read -p "Please input the db username(default: root): " USERNAME
if [ "x${USERNAME}" == "x" ]; then
USERNAME="root"
fi
read -p "Please input the db password(default: ""): " PASSWORD
read -p "Please input the db name(default: studio)" DATABASE
if [ "x${DATABASE}" == "x" ]; then
DATABASE="studio"
fi
# NOTE(review): the password is passed on the mysql command line and is
# visible in the process table -- consider a defaults file instead.
mysql -h ${HOST} -P ${PORT} -u ${USERNAME} -p${PASSWORD} --default-character-set=utf8 -e \
"CREATE DATABASE IF NOT EXISTS ${DATABASE}; USE ${DATABASE}; source ${SQL_SOURCE_PATH};"
# Persist the answers into bootstrap.properties (sed patches in place).
sed -ri "s![#]?(DB_HOST=)\S*!\1${HOST}!g" ${BOOTSTRAP_PROP_FILE}
sed -ri "s![#]?(DB_PORT=)\S*!\1${PORT}!g" ${BOOTSTRAP_PROP_FILE}
sed -ri "s![#]?(DB_USERNAME=)\S*!\1${USERNAME}!g" ${BOOTSTRAP_PROP_FILE}
sed -ri "s![#]?(DB_PASSWORD=)\S*!\1${PASSWORD}!g" ${BOOTSTRAP_PROP_FILE}
sed -ri "s![#]?(DB_DATABASE=)\S*!\1${DATABASE}!g" ${BOOTSTRAP_PROP_FILE}
fi
fi
fi

View File

@@ -0,0 +1,9 @@
@echo off
rem Launch the admin Engine with ..\conf and ..\lib\* on the classpath.
set home=%~dp0
set conf_dir=%home%..\conf
set lib_dir=%home%..\lib\*
set log_dir=%home%..\logs
echo %conf_dir%
rem Standalone Spring profile; log file goes to ..\logs\dbApi.log.
java -Dspring.profiles.active=standalone -Dlogging.file=%log_dir%\dbApi.log -classpath %conf_dir%;%lib_dir% com.platform.admin.Engine
pause

View File

@@ -0,0 +1,277 @@
#!/bin/bash
#
# Friendly display name and Spring Boot entry class managed by this script.
FRIEND_NAME=FLINKX-ADMIN
MAIN_CLASS=com.platform.admin.Engine
# Environment file (relative to BIN) consumed by load_env, overridable.
if [ ! ${ENV_FILE} ]; then
ENV_FILE="env.properties"
fi
# Polling interval (seconds) used while waiting for start/stop.
SLEEP_TIMEREVAL_S=2
# Resolve and print the directory containing this script, following symlinks.
# Runs in a subshell (BIN=`abs_path`), so DIR/SOURCE do not leak out.
abs_path(){
SOURCE="${BASH_SOURCE[0]}"
# Walk the symlink chain until SOURCE names a real file.
while [ -h "${SOURCE}" ]; do
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$(readlink "${SOURCE}")"
# A relative link target is resolved against the link's own directory.
[[ ${SOURCE} != /* ]] && SOURCE="${DIR}/${SOURCE}"
done
echo "$( cd -P "$( dirname "${SOURCE}" )" && pwd )"
}
# Write "<timestamp> [LEVEL] (pid) message" to stdout and ${SHELL_LOG}.
function LOG(){
local ts
ts=$(date "+%Y-%m-%d %H:%M:%S.%3N")
echo -e "${ts} [${1}] ($$) ${2}" | tee -a ${SHELL_LOG}
}
# Verify that a runnable java binary exists (JAVA_HOME preferred).
# Returns 0 when OK, 1 (with a banner on stderr) otherwise.
verify_java_env(){
if [ "x${JAVA_HOME}" != "x" ]; then
${JAVA_HOME}/bin/java -version >/dev/null 2>&1
else
java -version >/dev/null 2>&1
fi
if [ $? -ne 0 ]; then
# Fixed typo in banner: "availiable" -> "available" (width preserved).
cat 1>&2 <<EOF
+========================================================================+
| Error: Java Environment is not available, Please check your JAVA_HOME  |
+------------------------------------------------------------------------+
EOF
return 1
fi
return 0
}
# Load key=value pairs from ${BIN}/${ENV_FILE} into shell variables.
# Dots in keys become underscores (a.b=1 -> a_b=1); commented keys skipped.
load_env(){
LOG INFO "load environment variables"
while read line
do
# Only lines containing '=' are treated as assignments.
if [[ ! -z $(echo "${line}" | grep "=") ]]; then
key=${line%%=*}
value=${line#*=}
key1=$(echo ${key} | tr '.' '_')
# Skip commented-out keys (leading '#').
if [ -z $(echo "${key1}" | grep -P '\s*#+.*') ]; then
# NOTE(review): eval executes file content -- env.properties must be
# trusted; values containing spaces/metacharacters may misparse.
eval "${key1}=${value}"
fi
fi
done < "${BIN}/${ENV_FILE}"
}
BIN=`abs_path`
SHELL_LOG="${BIN}/console.out"
load_env
#verify environment
verify_java_env
if [ $? -ne 0 ]; then
# Fixed: `exit $?` re-read the status of the [ test above (always 0 here),
# so a missing JVM still exited successfully. Exit non-zero instead.
exit 1
fi
# Defaults for anything env.properties did not provide.
if [[ ! ${SERVICE_LOG_PATH} ]]; then
SERVICE_LOG_PATH=${BIN}/../logs
fi
if [[ ! ${SERVICE_CONF_PATH} ]]; then
SERVICE_CONF_PATH=${BIN}/../conf
fi
if [[ ! ${DATA_PATH} ]]; then
DATA_PATH=${BIN}/../data
fi
if [[ ! ${MAIL_USERNAME} ]]; then
MAIL_USERNAME="flinkx"
fi
if [[ ! ${MAIL_PASSWORD} ]]; then
MAIL_PASSWORD="123456"
fi
if [[ ! ${JAVA_OPTS} ]]; then
JAVA_OPTS=" -Xms2g -Xmx2g -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8"
fi
if [[ ! ${REMOTE_DEBUG_SWITCH} ]]; then
REMOTE_DEBUG_SWITCH=false
fi
if [[ ! ${REMOTE_DEBUG_PORT} ]]; then
REMOTE_DEBUG_PORT="8089"
fi
LIB_PATH=${BIN}/../lib
USER_DIR=${BIN}/../
CLASSPATH=${LIB_PATH}"/*:"${SERVICE_CONF_PATH}":."
# Optional remote JDWP debugging.
if [ ${REMOTE_DEBUG_SWITCH} == true ]; then
JAVA_OPTS=${JAVA_OPTS}" -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=${REMOTE_DEBUG_PORT}"
fi
# Assemble the final JVM flags: log paths, working dir, data dir, mail creds.
JAVA_OPTS=${JAVA_OPTS}" -XX:HeapDumpPath="${SERVICE_LOG_PATH}" -Dlog.path="${SERVICE_LOG_PATH}
JAVA_OPTS=${JAVA_OPTS}" -Duser.dir="${USER_DIR}
JAVA_OPTS=${JAVA_OPTS}" -Ddata.path="${DATA_PATH}" -Dmail.username="${MAIL_USERNAME}" -Dmail.password="${MAIL_PASSWORD}
if [ "x"${PID_FILE_PATH} != "x" ]; then
JAVA_OPTS=${JAVA_OPTS}" -Dpid.file="${PID_FILE_PATH}
fi
JAVA_OPTS=${JAVA_OPTS}" -Dlogging.config="${SERVICE_CONF_PATH}"/logback.xml"
JAVA_OPTS=${JAVA_OPTS}" -classpath "${CLASSPATH}
# Prefer JAVA_HOME binaries (java/jps) when set.
if [ "x${JAVA_HOME}" != "x" ]; then
EXE_JAVA=${JAVA_HOME}"/bin/java "${JAVA_OPTS}" "${MAIN_CLASS}
JPS=${JAVA_HOME}/bin/jps
else
EXE_JAVA="java "${JAVA_OPTS}" "${MAIN_CLASS}
JPS="jps"
fi
# Show the accepted sub-commands.
usage(){
printf '%s\n' " usage is [start|stop|shutdown|restart]"
}
# check if the process still in jvm
# Returns 0 when the service JVM is alive, 1 otherwise.
# $1 = friendly name (currently unused in lookups), $2 = main class pattern.
# With PID_FILE_PATH set: match the recorded pid against `jps -q` output;
# otherwise: grep `jps -l` for the main class name.
status_class(){
local p=""
if [ "x"${PID_FILE_PATH} != "x" ]; then
if [ -f ${PID_FILE_PATH} ]; then
local pid_in_file=`cat ${PID_FILE_PATH} 2>/dev/null`
if [ "x"${pid_in_file} != "x" ]; then
p=`${JPS} -q | grep ${pid_in_file} | awk '{print $1}'`
fi
fi
else
p=`${JPS} -l | grep "$2" | awk '{print $1}'`
fi
if [ -n "$p" ]; then
# echo "$1 ($2) is still running with pid $p"
return 0
else
# echo "$1 ($2) does not appear in the java process table"
return 1
fi
}
# Poll status_class for up to $1 seconds until the service is up.
# Returns 0 on success, 1 on timeout.
wait_for_startup(){
local now_s=`date '+%s'`
local stop_s=$((${now_s} + $1))
while [ ${now_s} -le ${stop_s} ];do
status_class ${FRIEND_NAME} ${MAIN_CLASS}
if [ $? -eq 0 ]; then
return 0
fi
sleep ${SLEEP_TIMEREVAL_S}
now_s=`date '+%s'`
done
# Fixed: was `exit 1`, which terminated the whole script before the caller
# (start_m) could log its timeout error; mirrors wait_for_stop's `return 1`.
return 1
}
# Poll until the service leaves the process table or $1 seconds elapse.
# Returns 0 once stopped, 1 on timeout.
wait_for_stop(){
local deadline=$((`date '+%s'` + $1))
while [ `date '+%s'` -le ${deadline} ]; do
if ! status_class ${FRIEND_NAME} ${MAIN_CLASS}; then
return 0
fi
sleep ${SLEEP_TIMEREVAL_S}
done
return 1
}
# Start the service unless an instance is already registered in jps,
# then wait up to 20s for it to appear in the process table.
start_m(){
status_class ${FRIEND_NAME} ${MAIN_CLASS}
if [ $? -eq 0 ]; then
LOG INFO "${FRIEND_NAME} has been started in process"
exit 0
fi
LOG INFO ${EXE_JAVA}
# Detach; all JVM output is captured in ${SHELL_LOG}.
nohup ${EXE_JAVA} >${SHELL_LOG} 2>&1 &
LOG INFO "Waiting ${FRIEND_NAME} to start complete ..."
wait_for_startup 20
if [ $? -eq 0 ]; then
LOG INFO "${FRIEND_NAME} start success"
return 0
else
LOG ERROR "${FRIEND_NAME} start exceeded over 20s" >&2
return 1
fi
}
# Gracefully stop the service (SIGTERM) and wait up to 20s for it to exit.
stop_m(){
local p=""
# Prefer the pid file when configured; fall back to scanning jps output.
if [ "x"${PID_FILE_PATH} != "x" ]; then
if [ -f ${PID_FILE_PATH} ]; then
local pid_in_file=`cat ${PID_FILE_PATH} 2>/dev/null`
if [ "x"${pid_in_file} != "x" ]; then
p=`${JPS} -q | grep ${pid_in_file} | awk '{print $1}'`
fi
fi
else
p=`${JPS} -l | grep "${MAIN_CLASS}" | awk '{print $1}'`
fi
if [ -z "${p}" ]; then
LOG INFO "${FRIEND_NAME} didn't start successfully, not found in the java process table"
return 0
fi
LOG INFO "Killing ${FRIEND_NAME} (pid ${p}) ..."
case "`uname`" in
# Fixed pattern typo: "CYCGWIN*" never matched Cygwin's uname output.
CYGWIN*) taskkill /PID "${p}" ;;
*) kill -SIGTERM "${p}" ;;
esac
LOG INFO "Waiting ${FRIEND_NAME} to stop complete ..."
wait_for_stop 20
if [ $? -eq 0 ]; then
LOG INFO "${FRIEND_NAME} stop success"
return 0
else
LOG ERROR "${FRIEND_NAME} stop exceeded over 20s" >&2
return 1
fi
}
# Force-kill the service (taskkill /F on Cygwin, kill -9 elsewhere).
shutdown_m(){
local p=""
if [ "x"${PID_FILE_PATH} != "x" ]; then
if [ -f ${PID_FILE_PATH} ]; then
local pid_in_file=`cat ${PID_FILE_PATH} 2>/dev/null`
if [ "x"${pid_in_file} != "x" ]; then
p=`${JPS} -q | grep ${pid_in_file} | awk '{print $1}'`
fi
fi
else
p=`${JPS} -l | grep "${MAIN_CLASS}" | awk '{print $1}'`
fi
if [ -z "${p}" ]; then
LOG INFO "${FRIEND_NAME} didn't start successfully, not found in the java process table"
return 0
fi
LOG INFO "Killing ${FRIEND_NAME} (pid ${p}) ..."
case "`uname`" in
# Fixed pattern typo: "CYCGWIN*" never matched Cygwin's uname output.
CYGWIN*) taskkill /F /PID "${p}" ;;
*) kill -9 "${p}" ;;
esac
}
# Stop then start; exits with the start status, or 1 when stop failed.
restart_m(){
if stop_m; then
start_m
exit $?
fi
LOG ERROR "${FRIEND_NAME} restart fail" >&2
exit 1
}
# Require exactly one sub-command.
if [ ! $1 ]; then
usage
exit 1;
fi
# Dispatch to the matching action; unknown commands print usage and fail.
case $1 in
start) start_m;;
stop) stop_m;;
shutdown) shutdown_m;;
restart) restart_m;;
*)
usage
exit 1
;;
esac
exit $?

View File

@@ -0,0 +1,21 @@
# environment variables
#JAVA_HOME=""
WEB_LOG_PATH=${BIN}/../logs
WEB_CONF_PATH=${BIN}/../conf
DATA_PATH=${BIN}/../data
SERVER_PORT=8080
#PID_FILE_PATH=${BIN}/flinkxadmin.pid
# mail account
MAIL_USERNAME=""
MAIL_PASSWORD=""
#debug
#REMOTE_DEBUG_SWITCH=true
#REMOTE_DEBUG_PORT=7003

View File

@@ -0,0 +1,35 @@
package com.platform.admin;
import com.platform.admin.entity.Common;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.data.mongo.MongoDataAutoConfiguration;
import org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration;
import org.springframework.cloud.client.SpringCloudApplication;
import org.springframework.cloud.openfeign.EnableFeignClients;
import org.springframework.core.env.Environment;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
import java.net.InetAddress;
import java.net.UnknownHostException;
/**
 * Entry point of the service-data-dts admin application.
 *
 * <p>Bootstraps Spring Cloud, enables Swagger and Feign clients, and
 * excludes Mongo auto-configuration (no MongoDB backing store here).
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 **/
@EnableSwagger2
@SpringCloudApplication
@EnableFeignClients(basePackages = {"cn.datax.service.system.api.feign"})
@EnableAutoConfiguration(exclude={MongoAutoConfiguration.class})
public class DataDtsServiceApplication {
    public static void main(String[] args) {
        // Fixed: forward command-line args so --spring.* / --server.port
        // overrides passed on the command line take effect.
        SpringApplication.run(DataDtsServiceApplication.class, args);
    }
}

View File

@@ -0,0 +1,31 @@
package com.platform.admin.base;
import cn.datax.service.system.api.dto.JwtUserDto;
import cn.datax.service.system.api.feign.UserServiceFeign;
import com.baomidou.mybatisplus.extension.api.ApiController;
import com.platform.admin.util.JwtTokenUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.servlet.http.HttpServletRequest;
import java.util.Enumeration;
import static com.platform.core.util.Constants.STRING_BLANK;
/**
 * Base controller shared by the admin REST controllers.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 * base controller
 **/
@Component
public class BaseController extends ApiController {
// NOTE(review): injected but not referenced in this class; presumably
// intended for resolving the real user from the system service -- confirm.
@Autowired
UserServiceFeign userServiceFeign;
// NOTE(review): hard-coded to 1L and the request is ignored; this looks
// like a stub -- confirm whether a JWT-based lookup was intended.
public Long getCurrentUserId(HttpServletRequest request) {
return 1L;
}
}

View File

@@ -0,0 +1,252 @@
package com.platform.admin.base;
import cn.hutool.core.util.BooleanUtil;
import cn.hutool.core.util.NumberUtil;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.platform.admin.util.PageUtils;
import com.platform.admin.util.ServletUtils;
import lombok.extern.slf4j.Slf4j;
import javax.servlet.http.HttpServletRequest;
import java.net.URLDecoder;
import java.util.Enumeration;
import java.util.LinkedHashMap;
import java.util.Map;
/**
*
* @author AllDataDC
* @date 2023/3/26 11:14
* 基础参数辅助类
**/
@Slf4j
public class BaseForm {
/**
* 查询参数对象
*/
protected Map<String, Object> values = new LinkedHashMap<>();
/**
* 当前页码
*/
private Long current = 1L;
/**
* 页大小
*/
private Long size = 10L;
/**
* 构造方法
*/
public BaseForm() {
try {
HttpServletRequest request = ServletUtils.getRequest();
Enumeration<String> params = request.getParameterNames();
while (params.hasMoreElements()) {
String name = params.nextElement();
String value = StrUtil.trim(request.getParameter(name));
this.set(name, URLDecoder.decode(value, "UTF-8"));
}
this.parsePagingQueryParams();
} catch (Exception e) {
e.printStackTrace();
log.error("BaseControlForm initialize parameters setting error" + e);
}
}
/**
* 获取页码
*
* @return
*/
public Long getPageNo() {
String pageNum = StrUtil.toString(this.get("current"));
if (!StrUtil.isEmpty(pageNum) && NumberUtil.isNumber(pageNum)) {
this.current = Long.parseLong(pageNum);
}
return this.current;
}
/**
* 获取页大小
*
* @return
*/
public Long getPageSize() {
String pageSize = StrUtil.toString(this.get("size"));
if (StrUtil.isNotEmpty(pageSize) && NumberUtil.isNumber(pageSize) && !"null".equalsIgnoreCase(pageSize)) {
this.size = Long.parseLong(pageSize);
}
return this.size;
}
/**
* 获得参数信息对象
*
* @return
*/
public Map<String, Object> getParameters() {
return values;
}
/**
* 根据key获取values中的值
*
* @param name
* @return
*/
public Object get(String name) {
if (values == null) {
values = new LinkedHashMap<>();
return null;
}
return this.values.get(name);
}
/**
* 根据key获取values中String类型值
*
* @param key
* @return String
*/
public String getString(String key) {
return StrUtil.toString(get(key));
}
/**
* 获取排序字段
*
* @return
*/
public String getSort() {
return StrUtil.toString(this.values.get("sort"));
}
/**
* 获取排序
*
* @return
*/
public String getOrder() {
return StrUtil.toString(this.values.get("order"));
}
/**
* 获取排序
*
* @return
*/
public String getOrderby() {
return StrUtil.toString(this.values.get("orderby"));
}
/**
* 解析出mybatis plus分页查询参数
*/
public Page getPlusPagingQueryEntity() {
Page page = new Page();
//如果无current默认返回1000条数据
page.setCurrent(this.getPageNo());
page.setSize(this.getPageSize());
if (ObjectUtil.isNotNull(this.get("ifCount"))) {
page.setSearchCount(BooleanUtil.toBoolean(this.getString("ifCount")));
} else {
//默认给true
page.setSearchCount(true);
}
return page;
}
/**
* 解析分页排序参数pageHelper
*/
public void parsePagingQueryParams() {
// 排序字段解析
String orderBy = StrUtil.toString(this.get("orderby")).trim();
String sortName = StrUtil.toString(this.get("sort")).trim();
String sortOrder = StrUtil.toString(this.get("order")).trim().toLowerCase();
if (StrUtil.isEmpty(orderBy) && !StrUtil.isEmpty(sortName)) {
if (!sortOrder.equals("asc") && !sortOrder.equals("desc")) {
sortOrder = "asc";
}
this.set("orderby", sortName + " " + sortOrder);
}
}
/**
* 设置参数
*
* @param name 参数名称
* @param value 参数值
*/
public void set(String name, Object value) {
if (ObjectUtil.isNotNull(value)) {
this.values.put(name, value);
}
}
/**
* 移除参数
*
* @param name
*/
public void remove(String name) {
this.values.remove(name);
}
/**
* 清除所有参数
*/
public void clear() {
if (values != null) {
values.clear();
}
}
/**
* 自定义查询组装
*
* @param map
* @return
*/
public QueryWrapper<?> pageQueryWrapperCustom(Map<String, Object> map, QueryWrapper<?> queryWrapper) {
// mybatis plus 分页相关的参数
Map<String, Object> pageParams = PageUtils.filterPageParams(map);
//过滤空值,分页查询相关的参数
Map<String, Object> colQueryMap = PageUtils.filterColumnQueryParams(map);
//排序 操作
pageParams.forEach((k, v) -> {
switch (k) {
case "ascs":
queryWrapper.orderByAsc(StrUtil.toUnderlineCase(StrUtil.toString(v)));
break;
case "descs":
queryWrapper.orderByDesc(StrUtil.toUnderlineCase(StrUtil.toString(v)));
break;
}
});
//遍历进行字段查询条件组装
colQueryMap.forEach((k, v) -> {
switch (k) {
case "pluginName":
case "datasourceName":
queryWrapper.like(StrUtil.toUnderlineCase(k), v);
break;
default:
queryWrapper.eq(StrUtil.toUnderlineCase(k), v);
}
});
return queryWrapper;
}
}

View File

@@ -0,0 +1,45 @@
package com.platform.admin.config;
import com.baomidou.mybatisplus.core.injector.DefaultSqlInjector;
import com.baomidou.mybatisplus.core.injector.ISqlInjector;
import com.baomidou.mybatisplus.extension.plugins.PaginationInterceptor;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.transaction.annotation.EnableTransactionManagement;
/**
 * MyBatis-Plus configuration: mapper scanning, pagination and SQL injector.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 * @Description:
 **/
@EnableTransactionManagement
@Configuration
@MapperScan("com.platform.admin.mapper")
public class MybatisPlusConfig {
/**
 * Pagination plugin; overflow=true rolls requests past the last page
 * back to the first page instead of returning an empty result.
 */
@Bean
public PaginationInterceptor paginationInterceptor() {
PaginationInterceptor paginationInterceptor = new PaginationInterceptor();
return paginationInterceptor.setOverflow(true);
}
/**
 * Logic-delete SQL injector (enabled via yml). Since 3.0.7.1 the
 * LogicSqlInjector merely extends DefaultSqlInjector, and later versions
 * dropped it, so DefaultSqlInjector is returned directly here.
 *
 * @return the SQL injector bean
 */
@Bean
public ISqlInjector sqlInjector() {
return new DefaultSqlInjector();
}
}

View File

@@ -0,0 +1,272 @@
package com.platform.admin.config;
import com.google.common.collect.Multimap;
import io.swagger.models.*;
import io.swagger.models.parameters.Parameter;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication;
import org.springframework.context.MessageSource;
import org.springframework.context.annotation.Primary;
import org.springframework.context.i18n.LocaleContextHolder;
import org.springframework.stereotype.Component;
import springfox.documentation.service.ApiInfo;
import springfox.documentation.service.ApiListing;
import springfox.documentation.service.Documentation;
import springfox.documentation.service.ResourceListing;
import springfox.documentation.swagger2.mappers.*;
import java.util.*;
import static com.google.common.collect.Maps.newTreeMap;
/**
 * Overrides springfox's Swagger2 mapper so tag names, operation notes and
 * parameter descriptions are resolved through the Spring
 * {@link MessageSource}, producing locale-aware (i18n) API documentation.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 * @Description:
 **/
@Component(value = "ServiceModelToSwagger2Mapper")
@Primary
@ConditionalOnWebApplication
public class ServiceModelToSwagger2MapperImpl extends ServiceModelToSwagger2Mapper {
@Autowired
private ModelMapper modelMapper;
@Autowired
private ParameterMapper parameterMapper;
@Autowired
private SecurityMapper securityMapper;
@Autowired
private LicenseMapper licenseMapper;
@Autowired
private VendorExtensionsMapper vendorExtensionsMapper;
// Resolves i18n message keys; falls back to the raw key text when absent.
@Autowired
private MessageSource messageSource;
// Maps the internal Documentation model onto a Swagger root object.
@Override
public Swagger mapDocumentation(Documentation from) {
if (from == null) {
return null;
}
Swagger swagger = new Swagger();
swagger.setVendorExtensions(vendorExtensionsMapper.mapExtensions(from.getVendorExtensions()));
swagger.setSchemes(mapSchemes(from.getSchemes()));
swagger.setPaths(mapApiListings(from.getApiListings()));
swagger.setHost(from.getHost());
swagger.setDefinitions(modelsFromApiListings( from.getApiListings() ) );
swagger.setSecurityDefinitions(securityMapper.toSecuritySchemeDefinitions(from.getResourceListing()));
ApiInfo info = fromResourceListingInfo(from);
if (info != null) {
swagger.setInfo(mapApiInfo(info));
}
swagger.setBasePath(from.getBasePath());
swagger.setTags(tagSetToTagList(from.getTags()));
// Copy consumes/produces defensively; a null list stays null.
List<String> list2 = from.getConsumes();
if (list2 != null) {
swagger.setConsumes(new ArrayList<String>(list2));
} else {
swagger.setConsumes(null);
}
List<String> list3 = from.getProduces();
if (list3 != null) {
swagger.setProduces(new ArrayList<String>(list3));
} else {
swagger.setProduces(null);
}
return swagger;
}
// Maps API metadata (license, contact, terms, version, title).
@Override
protected Info mapApiInfo(ApiInfo from) {
if (from == null) {
return null;
}
Info info = new Info();
info.setLicense(licenseMapper.apiInfoToLicense(from));
info.setVendorExtensions(vendorExtensionsMapper.mapExtensions(from.getVendorExtensions()));
info.setTermsOfService(from.getTermsOfServiceUrl());
info.setContact(map(from.getContact()));
info.setDescription(from.getDescription());
info.setVersion(from.getVersion());
info.setTitle(from.getTitle());
return info;
}
@Override
protected Contact map(springfox.documentation.service.Contact from) {
if (from == null) {
return null;
}
Contact contact = new Contact();
contact.setName(from.getName());
contact.setUrl(from.getUrl());
contact.setEmail(from.getEmail());
return contact;
}
// Maps a single operation, localizing its notes and tag names.
@Override
protected Operation mapOperation(springfox.documentation.service.Operation from) {
if (from == null) {
return null;
}
Locale locale = LocaleContextHolder.getLocale();
Operation operation = new Operation();
operation.setSecurity(mapAuthorizations(from.getSecurityReferences()));
operation.setVendorExtensions(vendorExtensionsMapper.mapExtensions(from.getVendorExtensions()));
// getMessage(key, args, default, locale): unresolved keys fall back to
// the raw note text.
operation.setDescription(messageSource.getMessage(from.getNotes(), null, from.getNotes(), locale));
operation.setOperationId(from.getUniqueId());
operation.setResponses(mapResponseMessages(from.getResponseMessages()));
operation.setSchemes(stringSetToSchemeList(from.getProtocol()));
Set<String> tagsSet = new HashSet<>(1);
if(from.getTags() != null && from.getTags().size() > 0){
// NOTE(review): tagsSet is never populated, so this initial capacity is
// always 0 -- likely meant from.getTags().size(); harmless in practice.
List<String> list = new ArrayList<String>(tagsSet.size());
Iterator<String> it = from.getTags().iterator();
while(it.hasNext()){
String tag = it.next();
list.add(
StringUtils.isNotBlank(tag) ? messageSource.getMessage(tag, null, tag, locale) : " ");
}
operation.setTags(list);
}else {
operation.setTags(null);
}
operation.setSummary(from.getSummary());
Set<String> set1 = from.getConsumes();
if (set1 != null) {
operation.setConsumes(new ArrayList<String>(set1));
} else {
operation.setConsumes(null);
}
Set<String> set2 = from.getProduces();
if (set2 != null) {
operation.setProduces(new ArrayList<String>(set2));
} else {
operation.setProduces(null);
}
operation.setParameters(parameterListToParameterList(from.getParameters()));
if (from.getDeprecated() != null) {
operation.setDeprecated(Boolean.parseBoolean(from.getDeprecated()));
}
return operation;
}
// Maps a tag, localizing its display name.
@Override
protected Tag mapTag(springfox.documentation.service.Tag from) {
if (from == null) {
return null;
}
Locale locale = LocaleContextHolder.getLocale();
Tag tag = new Tag();
tag.setVendorExtensions(vendorExtensionsMapper.mapExtensions(from.getVendorExtensions()));
tag.setName(messageSource.getMessage(from.getName(), null, from.getName(), locale));
tag.setDescription(from.getDescription());
return tag;
}
// Null-safe extraction of the resource listing's ApiInfo.
private ApiInfo fromResourceListingInfo(Documentation documentation) {
if (documentation == null) {
return null;
}
ResourceListing resourceListing = documentation.getResourceListing();
if (resourceListing == null) {
return null;
}
ApiInfo info = resourceListing.getInfo();
if (info == null) {
return null;
}
return info;
}
protected List<Tag> tagSetToTagList(Set<springfox.documentation.service.Tag> set) {
if (set == null) {
return null;
}
List<Tag> list = new ArrayList<Tag>(set.size());
for (springfox.documentation.service.Tag tag : set) {
list.add(mapTag(tag));
}
return list;
}
// Converts protocol names ("HTTP"/"HTTPS"/"WS"/"WSS") to Scheme constants.
protected List<Scheme> stringSetToSchemeList(Set<String> set) {
if (set == null) {
return null;
}
List<Scheme> list = new ArrayList<Scheme>(set.size());
for (String string : set) {
list.add(Enum.valueOf(Scheme.class, string));
}
return list;
}
// Rebuilds each parameter with a localized description before mapping.
protected List<Parameter> parameterListToParameterList(List<springfox.documentation.service.Parameter> list) {
if (list == null) {
return null;
}
List<Parameter> list1 = new ArrayList<Parameter>(list.size());
Locale locale = LocaleContextHolder.getLocale();
for (springfox.documentation.service.Parameter param : list) {
String description = messageSource.getMessage(param.getDescription(), null, param.getDescription(), locale);
springfox.documentation.service.Parameter parameter = new springfox.documentation.service.Parameter(param.getName(),description,param.getDefaultValue(),param.isRequired(),param.isAllowMultiple(),param.isAllowEmptyValue(),param.getModelRef(),param.getType(),param.getAllowableValues(),param.getParamType(),param.getParamAccess(),param.isHidden(),param.getPattern(),param.getCollectionFormat(),param.getOrder(),param.getScalarExample(),param.getExamples() ,param.getVendorExtentions());
list1.add(parameterMapper.mapParameter(parameter));
}
return list1;
}
// Collects model definitions from every API listing (later entries win on
// duplicate keys) and maps them to Swagger models.
Map<String, Model> modelsFromApiListings(Multimap<String, ApiListing> apiListings) {
Map<String, springfox.documentation.schema.Model> definitions = newTreeMap();
for (ApiListing each : apiListings.values()) {
definitions.putAll(each.getModels());
}
return modelMapper.mapModels(definitions);
}
}

View File

@@ -0,0 +1,41 @@
package com.platform.admin.config;
import com.github.xiaoymin.swaggerbootstrapui.annotations.EnableSwaggerBootstrapUI;
import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
import springfox.documentation.builders.ApiInfoBuilder;
import springfox.documentation.builders.PathSelectors;
import springfox.documentation.builders.RequestHandlerSelectors;
import springfox.documentation.service.ApiInfo;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
/**
 * Swagger2 API documentation configuration.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 **/
@Configuration
@EnableSwagger2
@EnableSwaggerBootstrapUI
@ConditionalOnWebApplication
public class SwaggerConfig implements WebMvcConfigurer {
    /** Docket scanning this project's controllers. */
    @Bean
    public Docket createRestApi() {
        // Fixed: basePackage pointed at "com.guoliang.flinkx.admin.controller",
        // which does not exist in this project (controllers live under
        // com.platform.admin.controller), so no endpoints were documented.
        return new Docket(DocumentationType.SWAGGER_2).apiInfo(apiInfo()).select()
                .apis(RequestHandlerSelectors.basePackage("com.platform.admin.controller")).paths(PathSelectors.any())
                .build();
    }
    /** Static API metadata shown on the documentation page. */
    private ApiInfo apiInfo() {
        return new ApiInfoBuilder().title("FlinkX Web Api Docs").description("FlinkX Web Api Docs")
                .build();
    }
}

View File

@@ -0,0 +1,24 @@
package com.platform.admin.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
/**
 * Web security setup: exposes a BCrypt encoder bean; CSRF is disabled and
 * every request (including logout) is permitted without authentication.
 */
@Configuration
@EnableWebSecurity(debug = false)
public class WebSecurityConfiguration extends WebSecurityConfigurerAdapter {

    /** Password encoder bean used for credential hashing. */
    @Bean
    public BCryptPasswordEncoder bCryptPasswordEncoder() {
        return new BCryptPasswordEncoder();
    }

    @Override
    protected void configure(HttpSecurity http) throws Exception {
        http.csrf().disable()
                .authorizeRequests().anyRequest().permitAll()
                .and()
                .logout().permitAll();
    }
}

View File

@@ -0,0 +1,51 @@
package com.platform.admin.controller;
import com.baomidou.mybatisplus.extension.api.R;
import com.platform.admin.base.BaseController;
import com.platform.admin.dto.FlinkXJsonBuildDto;
import com.platform.admin.service.FlinkxJsonService;
import com.platform.admin.core.util.I18nUtil;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.CollectionUtils;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * Controller that assembles the flinkx job JSON.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 **/
@RestController
@RequestMapping("/flinkxJson")
@Api(tags = "组装flinkx json的控制器")
public class FlinkxJsonController extends BaseController {

    @Autowired
    private FlinkxJsonService flinkxJsonService;

    /**
     * Validates the build request and returns the generated job JSON.
     *
     * @param dto reader/writer datasource ids and column lists
     * @return the job JSON, or a failure naming the first missing field
     */
    @PostMapping("/buildJson")
    @ApiOperation("JSON构建")
    public R<String> buildJobJson(@RequestBody FlinkXJsonBuildDto dto) {
        if (dto.getReaderDatasourceId() == null) {
            return failed(requiredMessage("jobinfo_field_readerDataSource"));
        }
        if (dto.getWriterDatasourceId() == null) {
            return failed(requiredMessage("jobinfo_field_writerDataSource"));
        }
        if (CollectionUtils.isEmpty(dto.getReaderColumns())) {
            return failed(requiredMessage("jobinfo_field_readerColumns"));
        }
        if (CollectionUtils.isEmpty(dto.getWriterColumns())) {
            return failed(requiredMessage("jobinfo_field_writerColumns"));
        }
        return success(flinkxJsonService.buildJobJson(dto));
    }

    /** Builds the localized "please choose <field>" validation message. */
    private String requiredMessage(String fieldKey) {
        return I18nUtil.getString("system_please_choose") + I18nUtil.getString(fieldKey);
    }
}

View File

@@ -0,0 +1,52 @@
package com.platform.admin.controller;
import com.platform.admin.service.JobService;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.ReturnT;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.propertyeditors.CustomDateEditor;
import org.springframework.web.bind.WebDataBinder;
import org.springframework.web.bind.annotation.*;
import javax.annotation.Resource;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map;
/**
 * Dashboard (home page) endpoints.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 **/
@RestController
@Api(tags = "首页接口")
@RequestMapping("/api")
public class IndexController {

    @Resource
    private JobService jobService;

    /** Dashboard summary numbers. */
    @GetMapping("/index")
    @ApiOperation("监控图")
    public ReturnT<Map<String, Object>> index() {
        Map<String, Object> dashboard = jobService.dashboardInfo();
        return new ReturnT<>(dashboard);
    }

    /** Chart data backing the dashboard graphs. */
    @PostMapping("/chartInfo")
    @ApiOperation("图表信息")
    public ReturnT<Map<String, Object>> chartInfo() {
        return jobService.chartInfo();
    }

    /** Binds "yyyy-MM-dd HH:mm:ss" strings to Date parameters (strict). */
    @InitBinder
    public void initBinder(WebDataBinder binder) {
        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        dateFormat.setLenient(false);
        binder.registerCustomEditor(Date.class, new CustomDateEditor(dateFormat, true));
    }
}

View File

@@ -0,0 +1,147 @@
package com.platform.admin.controller;
import com.platform.admin.core.conf.JobAdminConfig;
import com.platform.admin.core.util.JacksonUtil;
import com.platform.core.biz.AdminBiz;
import com.platform.core.biz.model.HandleCallbackParam;
import com.platform.core.biz.model.HandleProcessCallbackParam;
import com.platform.core.biz.model.RegistryParam;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.util.JobRemotingUtil;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.util.List;
/**
*
* @author AllDataDC
* @date 2023/3/26 11:14
* API的接口
**/
@RestController
@RequestMapping("/api")
public class JobApiController {

    @Resource
    private AdminBiz adminBiz;

    /**
     * Returns true when an admin access token is configured (non-null,
     * non-blank) and the request's XXL_RPC_ACCESS_TOKEN header does not match
     * it. Shared by all four endpoints below, which previously repeated this
     * three-clause condition verbatim.
     */
    private boolean accessTokenMismatch(HttpServletRequest request) {
        String token = JobAdminConfig.getAdminConfig().getAccessToken();
        return token != null
                && token.trim().length() > 0
                && !token.equals(request.getHeader(JobRemotingUtil.XXL_RPC_ACCESS_TOKEN));
    }

    /**
     * Executor callback with job handle results.
     *
     * @param data JSON array of {@code HandleCallbackParam}
     * @return result of {@link AdminBiz#callback}, or FAIL on bad token / bad payload
     */
    @RequestMapping("/callback")
    public ReturnT<String> callback(HttpServletRequest request, @RequestBody(required = false) String data) {
        if (accessTokenMismatch(request)) {
            return new ReturnT<>(ReturnT.FAIL_CODE, "The access token is wrong.");
        }
        List<HandleCallbackParam> callbackParamList = null;
        try {
            callbackParamList = JacksonUtil.readValue(data, List.class, HandleCallbackParam.class);
        } catch (Exception ignored) {
            // malformed JSON is reported uniformly as "request data invalid" below
        }
        if (callbackParamList == null || callbackParamList.isEmpty()) {
            return new ReturnT<>(ReturnT.FAIL_CODE, "The request data invalid.");
        }
        return adminBiz.callback(callbackParamList);
    }

    /**
     * Executor callback with process-level handle results.
     *
     * @param data JSON array of {@code HandleProcessCallbackParam}
     * @return result of {@link AdminBiz#processCallback}, or FAIL on bad token / bad payload
     */
    @RequestMapping("/processCallback")
    public ReturnT<String> processCallback(HttpServletRequest request, @RequestBody(required = false) String data) {
        if (accessTokenMismatch(request)) {
            return new ReturnT<>(ReturnT.FAIL_CODE, "The access token is wrong.");
        }
        List<HandleProcessCallbackParam> callbackParamList = null;
        try {
            callbackParamList = JacksonUtil.readValue(data, List.class, HandleProcessCallbackParam.class);
        } catch (Exception ignored) {
            // malformed JSON is reported uniformly as "request data invalid" below
        }
        if (callbackParamList == null || callbackParamList.isEmpty()) {
            return new ReturnT<>(ReturnT.FAIL_CODE, "The request data invalid.");
        }
        return adminBiz.processCallback(callbackParamList);
    }

    /**
     * Executor registration (heartbeat).
     *
     * @param data JSON-encoded {@code RegistryParam}
     * @return result of {@link AdminBiz#registry}, or FAIL on bad token / bad payload
     */
    @RequestMapping("/registry")
    public ReturnT<String> registry(HttpServletRequest request, @RequestBody(required = false) String data) {
        if (accessTokenMismatch(request)) {
            return new ReturnT<>(ReturnT.FAIL_CODE, "The access token is wrong.");
        }
        RegistryParam registryParam = null;
        try {
            registryParam = JacksonUtil.readValue(data, RegistryParam.class);
        } catch (Exception ignored) {
            // malformed JSON is reported uniformly as "request data invalid" below
        }
        if (registryParam == null) {
            return new ReturnT<>(ReturnT.FAIL_CODE, "The request data invalid.");
        }
        return adminBiz.registry(registryParam);
    }

    /**
     * Executor de-registration.
     *
     * @param data JSON-encoded {@code RegistryParam}
     * @return result of {@link AdminBiz#registryRemove}, or FAIL on bad token / bad payload
     */
    @RequestMapping("/registryRemove")
    public ReturnT<String> registryRemove(HttpServletRequest request, @RequestBody(required = false) String data) {
        if (accessTokenMismatch(request)) {
            return new ReturnT<>(ReturnT.FAIL_CODE, "The access token is wrong.");
        }
        RegistryParam registryParam = null;
        try {
            registryParam = JacksonUtil.readValue(data, RegistryParam.class);
        } catch (Exception ignored) {
            // malformed JSON is reported uniformly as "request data invalid" below
        }
        if (registryParam == null) {
            return new ReturnT<>(ReturnT.FAIL_CODE, "The request data invalid.");
        }
        return adminBiz.registryRemove(registryParam);
    }
}

View File

@@ -0,0 +1,78 @@
package com.platform.admin.controller;
import com.platform.admin.mapper.JobInfoMapper;
import com.platform.admin.mapper.JobLogGlueMapper;
import com.platform.admin.core.util.I18nUtil;
import com.platform.admin.entity.JobInfo;
import com.platform.admin.entity.JobLogGlue;
import com.platform.core.biz.model.ReturnT;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import javax.annotation.Resource;
import java.util.Date;
import static com.platform.core.biz.model.ReturnT.FAIL_CODE;
/**
*
* @author AllDataDC
* @date 2023/3/26 11:14
* 任务状态接口
**/
@RestController
@RequestMapping("/jobcode")
@Api(tags = "任务状态接口")
public class JobCodeController {

    @Resource
    private JobInfoMapper jobInfoMapper;
    @Resource
    private JobLogGlueMapper jobLogGlueMapper;

    /**
     * Saves a new version of a job's glue source code: updates the job row,
     * appends a history entry, then trims the history to 30 backups.
     *
     * @param model      Spring MVC model (unused; kept for signature compatibility)
     * @param id         id of the job whose code is being updated
     * @param glueSource new source code (not validated here)
     * @param glueRemark mandatory change remark, 4..100 characters
     * @return SUCCESS, or FAIL_CODE with an i18n message on validation failure
     */
    @RequestMapping(value = "/save", method = RequestMethod.POST)
    @ApiOperation("保存任务状态")
    public ReturnT<String> save(Model model, int id, String glueSource, String glueRemark) {
        // remark is mandatory and length-bounded
        if (glueRemark == null) {
            return new ReturnT<>(FAIL_CODE, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobinfo_glue_remark")));
        }
        if (glueRemark.length() < 4 || glueRemark.length() > 100) {
            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("jobinfo_glue_remark_limit"));
        }
        JobInfo existsJobInfo = jobInfoMapper.loadById(id);
        if (existsJobInfo == null) {
            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("jobinfo_glue_jobid_invalid"));
        }

        // One timestamp for every field so the job row and its glue-log entry
        // agree exactly (previously each setter got its own new Date()).
        Date now = new Date();

        // update the job with the new code
        existsJobInfo.setGlueSource(glueSource);
        existsJobInfo.setGlueRemark(glueRemark);
        existsJobInfo.setGlueUpdatetime(now);
        existsJobInfo.setUpdateTime(now);
        jobInfoMapper.update(existsJobInfo);

        // record this version in the glue history log
        JobLogGlue jobLogGlue = new JobLogGlue();
        jobLogGlue.setJobId(existsJobInfo.getId());
        jobLogGlue.setGlueType(existsJobInfo.getGlueType());
        jobLogGlue.setGlueSource(glueSource);
        jobLogGlue.setGlueRemark(glueRemark);
        jobLogGlue.setAddTime(now);
        jobLogGlue.setUpdateTime(now);
        jobLogGlueMapper.save(jobLogGlue);

        // retain at most 30 code backups per job
        jobLogGlueMapper.removeOld(existsJobInfo.getId(), 30);
        return ReturnT.SUCCESS;
    }
}

View File

@@ -0,0 +1,136 @@
package com.platform.admin.controller;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.api.R;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.platform.admin.base.BaseController;
import com.platform.admin.base.BaseForm;
import com.platform.admin.service.JobDatasourceService;
import com.platform.admin.core.util.LocalCacheUtil;
import com.platform.admin.entity.JobDatasource;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.io.IOException;
import java.io.Serializable;
import java.util.List;
/**
*
* @author AllDataDC
* @date 2023/3/26 11:14
* jdbc数据源配置控制器层
**/
@RestController
@RequestMapping("/jobJdbcDatasource")
@Api(tags = "jdbc数据源配置接口")
public class JobDatasourceController extends BaseController {
    /**
     * 服务对象
     */
    @Autowired
    private JobDatasourceService jobJdbcDatasourceService;

    /**
     * Pages over all datasources. Paging/sort parameters (current, size,
     * ascs, descs, ...) are read from the current request via BaseForm.
     *
     * @return one page of JobDatasource rows
     */
    @GetMapping
    @ApiOperation("分页查询所有数据")
    @ApiImplicitParams(
            {@ApiImplicitParam(paramType = "query", dataType = "String", name = "current", value = "当前页", defaultValue = "1", required = true),
                    @ApiImplicitParam(paramType = "query", dataType = "String", name = "size", value = "一页大小", defaultValue = "10", required = true),
                    @ApiImplicitParam(paramType = "query", dataType = "Boolean", name = "ifCount", value = "是否查询总数", defaultValue = "true"),
                    @ApiImplicitParam(paramType = "query", dataType = "String", name = "ascs", value = "升序字段,多个用逗号分隔"),
                    @ApiImplicitParam(paramType = "query", dataType = "String", name = "descs", value = "降序字段,多个用逗号分隔")
            })
    public R<IPage<JobDatasource>> selectAll() {
        BaseForm form = new BaseForm();
        QueryWrapper<JobDatasource> query = (QueryWrapper<JobDatasource>) form.pageQueryWrapperCustom(form.getParameters(), new QueryWrapper<JobDatasource>());
        // typed IPage instead of the raw Page the original used
        IPage<JobDatasource> jdbcList = jobJdbcDatasourceService.page(form.getPlusPagingQueryEntity(), query);
        return success(jdbcList);
    }

    /**
     * Lists every configured datasource (no paging).
     */
    @ApiOperation("获取所有数据源")
    @GetMapping("/all")
    public R<List<JobDatasource>> selectAllDatasource() {
        return success(this.jobJdbcDatasourceService.selectAllDatasource());
    }

    /**
     * Loads one datasource by primary key.
     *
     * @param id 主键
     * @return the row, or a success wrapper around null when not found
     */
    @ApiOperation("通过主键查询单条数据")
    @GetMapping("{id}")
    public R<JobDatasource> selectOne(@PathVariable Serializable id) {
        return success(this.jobJdbcDatasourceService.getById(id));
    }

    /**
     * Creates a datasource.
     *
     * @param entity 实体对象
     * @return whether the insert succeeded
     */
    @ApiOperation("新增数据")
    @PostMapping
    public R<Boolean> insert(@RequestBody JobDatasource entity) {
        return success(this.jobJdbcDatasourceService.save(entity));
    }

    /**
     * Updates a datasource. Credentials that are unchanged relative to the
     * stored row are nulled out so the service layer does not re-process
     * (presumably re-encrypt — TODO confirm) the already-stored value.
     *
     * @param entity 实体对象
     * @return whether the update succeeded; false when the id is unknown
     */
    @PutMapping
    @ApiOperation("修改数据")
    public R<Boolean> update(@RequestBody JobDatasource entity) {
        LocalCacheUtil.remove(entity.getDatasourceName());
        JobDatasource stored = jobJdbcDatasourceService.getById(entity.getId());
        if (stored == null) {
            // unknown id: nothing to diff against (the original NPE'd here)
            return success(Boolean.FALSE);
        }
        // BUG FIX: the original null-checked stored.getJdbcUsername() but then
        // called entity.getJdbcUsername().equals(...), throwing NPE whenever the
        // request omitted the username; the entity side is the one dereferenced.
        if (null != entity.getJdbcUsername() && entity.getJdbcUsername().equals(stored.getJdbcUsername())) {
            entity.setJdbcUsername(null);
        }
        if (null != entity.getJdbcPassword() && entity.getJdbcPassword().equals(stored.getJdbcPassword())) {
            entity.setJdbcPassword(null);
        }
        return success(this.jobJdbcDatasourceService.updateById(entity));
    }

    /**
     * Deletes the datasources with the given ids.
     *
     * @param idList 主键结合
     * @return whether the delete succeeded
     */
    @DeleteMapping
    @ApiOperation("删除数据")
    public R<Boolean> delete(@RequestParam("idList") List<Long> idList) {
        return success(this.jobJdbcDatasourceService.removeByIds(idList));
    }

    /**
     * Tests connectivity of the supplied datasource definition.
     *
     * @param jobJdbcDatasource candidate datasource configuration
     * @return whether the connection test succeeded
     */
    @PostMapping("/test")
    @ApiOperation("测试数据")
    public R<Boolean> dataSourceTest (@RequestBody JobDatasource jobJdbcDatasource) throws IOException {
        return success(jobJdbcDatasourceService.dataSourceTest(jobJdbcDatasource));
    }
}

View File

@@ -0,0 +1,175 @@
package com.platform.admin.controller;
import com.platform.admin.mapper.JobGroupMapper;
import com.platform.admin.mapper.JobInfoMapper;
import com.platform.admin.mapper.JobRegistryMapper;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.enums.RegistryConfig;
import com.platform.admin.core.util.I18nUtil;
import com.platform.admin.entity.JobGroup;
import com.platform.admin.entity.JobRegistry;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.web.bind.annotation.*;
import javax.annotation.Resource;
import java.util.*;
/**
*
* @author AllDataDC
* @date 2023/3/26 11:14
* 执行器管理接口
**/
@RestController
@RequestMapping("/jobGroup")
@Api(tags = "执行器管理接口")
public class JobGroupController {

    @Resource
    public JobInfoMapper jobInfoMapper;
    @Resource
    public JobGroupMapper jobGroupMapper;
    @Resource
    private JobRegistryMapper jobRegistryMapper;

    /** Lists every executor group. */
    @GetMapping("/list")
    @ApiOperation("执行器列表")
    public ReturnT<List<JobGroup>> getExecutorList() {
        return new ReturnT<>(jobGroupMapper.findAll());
    }

    /**
     * Validates appName/title — the checks save() and update() previously
     * duplicated verbatim. Returns null when valid, otherwise the failure.
     */
    private ReturnT<String> validBasic(JobGroup jobGroup) {
        if (jobGroup.getAppName() == null || jobGroup.getAppName().trim().length() == 0) {
            return new ReturnT<>(500, (I18nUtil.getString("system_please_input") + "AppName"));
        }
        if (jobGroup.getAppName().length() < 4 || jobGroup.getAppName().length() > 64) {
            return new ReturnT<>(500, I18nUtil.getString("jobgroup_field_appName_length"));
        }
        if (jobGroup.getTitle() == null || jobGroup.getTitle().trim().length() == 0) {
            return new ReturnT<>(500, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobgroup_field_title")));
        }
        return null;
    }

    /**
     * Validates a manually entered comma-separated address list.
     * Returns null when valid, otherwise the failure.
     */
    private ReturnT<String> validAddressList(String addressList) {
        if (addressList == null || addressList.trim().length() == 0) {
            return new ReturnT<>(500, I18nUtil.getString("jobgroup_field_addressType_limit"));
        }
        for (String item : addressList.split(",")) {
            if (item == null || item.trim().length() == 0) {
                return new ReturnT<>(500, I18nUtil.getString("jobgroup_field_registryList_invalid"));
            }
        }
        return null;
    }

    /**
     * Creates an executor group. Manually addressed groups (addressType != 0)
     * must carry a well-formed address list.
     */
    @PostMapping("/save")
    @ApiOperation("新建执行器")
    public ReturnT<String> save(@RequestBody JobGroup jobGroup) {
        ReturnT<String> invalid = validBasic(jobGroup);
        if (invalid != null) {
            return invalid;
        }
        if (jobGroup.getAddressType() != 0) {
            invalid = validAddressList(jobGroup.getAddressList());
            if (invalid != null) {
                return invalid;
            }
        }
        int ret = jobGroupMapper.save(jobGroup);
        return (ret > 0) ? ReturnT.SUCCESS : ReturnT.FAIL;
    }

    /**
     * Updates an executor group. For auto-registered groups (addressType == 0)
     * the address list is rebuilt from live registry entries; for manual
     * groups the supplied list is validated.
     */
    @PostMapping("/update")
    @ApiOperation("更新执行器")
    public ReturnT<String> update(@RequestBody JobGroup jobGroup) {
        ReturnT<String> invalid = validBasic(jobGroup);
        if (invalid != null) {
            return invalid;
        }
        if (jobGroup.getAddressType() == 0) {
            // 0 = auto-registered: derive the address list (null when no live executor)
            List<String> registryList = findRegistryByAppName(jobGroup.getAppName());
            String addressListStr = null;
            if (registryList != null && !registryList.isEmpty()) {
                Collections.sort(registryList);
                // replaces the original append-then-substring concatenation
                addressListStr = String.join(",", registryList);
            }
            jobGroup.setAddressList(addressListStr);
        } else {
            // 1 = manual entry
            invalid = validAddressList(jobGroup.getAddressList());
            if (invalid != null) {
                return invalid;
            }
        }
        int ret = jobGroupMapper.update(jobGroup);
        return (ret > 0) ? ReturnT.SUCCESS : ReturnT.FAIL;
    }

    /**
     * Collects live EXECUTOR registry addresses grouped by appName (deduplicated,
     * insertion order) and returns the list for the requested appName, or null
     * when none are registered.
     */
    private List<String> findRegistryByAppName(String appNameParam) {
        HashMap<String, List<String>> appAddressMap = new HashMap<>();
        List<JobRegistry> list = jobRegistryMapper.findAll(RegistryConfig.DEAD_TIMEOUT, new Date());
        if (list != null) {
            for (JobRegistry item : list) {
                if (RegistryConfig.RegistType.EXECUTOR.name().equals(item.getRegistryGroup())) {
                    List<String> registryList =
                            appAddressMap.computeIfAbsent(item.getRegistryKey(), k -> new ArrayList<>());
                    if (!registryList.contains(item.getRegistryValue())) {
                        registryList.add(item.getRegistryValue());
                    }
                }
            }
        }
        return appAddressMap.get(appNameParam);
    }

    /**
     * Removes an executor group. Refused when jobs still reference it or when
     * it is the last remaining group.
     */
    @PostMapping("/remove")
    @ApiOperation("移除执行器")
    public ReturnT<String> remove(int id) {
        int count = jobInfoMapper.pageListCount(0, 10, id, -1, null, null, 0, null);
        if (count > 0) {
            return new ReturnT<>(500, I18nUtil.getString("jobgroup_del_limit_0"));
        }
        List<JobGroup> allList = jobGroupMapper.findAll();
        if (allList.size() == 1) {
            return new ReturnT<>(500, I18nUtil.getString("jobgroup_del_limit_1"));
        }
        int ret = jobGroupMapper.remove(id);
        return (ret > 0) ? ReturnT.SUCCESS : ReturnT.FAIL;
    }

    /** Loads one executor group by id; FAIL_CODE when not found. */
    @RequestMapping(value = "/loadById", method = RequestMethod.POST)
    @ApiOperation("根据id获取执行器")
    public ReturnT<JobGroup> loadById(int id) {
        JobGroup jobGroup = jobGroupMapper.load(id);
        return jobGroup != null ? new ReturnT<>(jobGroup) : new ReturnT<>(ReturnT.FAIL_CODE, null);
    }

    /** Searches executor groups by appName / title / address list (all optional). */
    @GetMapping("/query")
    @ApiOperation("查询执行器")
    public ReturnT<List<JobGroup>> get(@ApiParam(value = "执行器AppName")
                                       @RequestParam(value = "appName", required = false) String appName,
                                       @ApiParam(value = "执行器名称")
                                       @RequestParam(value = "title", required = false) String title,
                                       @ApiParam(value = "执行器地址列表")
                                       @RequestParam(value = "addressList", required = false) String addressList) {
        return new ReturnT<>(jobGroupMapper.find(appName, title, addressList));
    }
}

View File

@@ -0,0 +1,137 @@
package com.platform.admin.controller;
import com.platform.admin.base.BaseController;
import com.platform.admin.dto.FlinkXBatchJsonBuildDto;
import com.platform.admin.dto.TriggerJobDto;
import com.platform.admin.service.JobService;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.util.DateUtil;
import com.platform.admin.core.cron.CronExpression;
import com.platform.admin.core.thread.JobTriggerPoolHelper;
import com.platform.admin.core.trigger.TriggerTypeEnum;
import com.platform.admin.core.util.I18nUtil;
import com.platform.admin.entity.JobInfo;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.web.bind.annotation.*;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
*
* @author AllDataDC
* @date 2023/3/26 11:14
* 任务配置接口
**/
@Api(tags = "任务配置接口")
@RestController
@RequestMapping("/job")
public class JobInfoController extends BaseController {

    @Resource
    private JobService jobService;

    /**
     * Pages over jobs matching the given filters.
     * The page offset is clamped at 0: the declared default current=0 would
     * otherwise produce a negative offset of (0-1)*size.
     */
    @GetMapping("/pageList")
    @ApiOperation("任务列表")
    public ReturnT<Map<String, Object>> pageList(@RequestParam(value = "current", required = false, defaultValue = "0") int current,
                                                 @RequestParam(value = "size", required = false, defaultValue = "10") int size,
                                                 @RequestParam("jobGroup") int jobGroup, @RequestParam("triggerStatus") int triggerStatus,
                                                 @RequestParam("jobDesc") String jobDesc, @RequestParam("glueType") String glueType,
                                                 @RequestParam("projectIds") Integer[] projectIds) {
        int offset = Math.max(0, (current - 1) * size);
        return new ReturnT<>(jobService.pageList(offset, size, jobGroup, triggerStatus, jobDesc, glueType, 0, projectIds));
    }

    /** Lists every job without paging. */
    @GetMapping("/list")
    @ApiOperation("全部任务列表")
    public ReturnT<List<JobInfo>> list() {
        return new ReturnT<>(jobService.list());
    }

    /** Creates a job, stamped with the current user's id. */
    @PostMapping("/add")
    @ApiOperation("添加任务")
    public ReturnT<String> add(HttpServletRequest request, @RequestBody JobInfo jobInfo) {
        jobInfo.setUserId(getCurrentUserId(request));
        return jobService.add(jobInfo);
    }

    /** Updates a job, stamped with the current user's id. */
    @PostMapping("/update")
    @ApiOperation("更新任务")
    public ReturnT<String> update(HttpServletRequest request, @RequestBody JobInfo jobInfo) {
        jobInfo.setUserId(getCurrentUserId(request));
        return jobService.update(jobInfo);
    }

    /** Removes the job with the given id. */
    @PostMapping(value = "/remove/{id}")
    @ApiOperation("移除任务")
    public ReturnT<String> remove(@PathVariable(value = "id") int id) {
        return jobService.remove(id);
    }

    /** Stops (pauses) the job with the given id. */
    @RequestMapping(value = "/stop", method = RequestMethod.POST)
    @ApiOperation("停止任务")
    public ReturnT<String> pause(int id) {
        return jobService.stop(id);
    }

    /** Starts (resumes) the job with the given id. */
    @RequestMapping(value = "/start", method = RequestMethod.POST)
    @ApiOperation("开启任务")
    public ReturnT<String> start(int id) {
        return jobService.start(id);
    }

    /**
     * Manually triggers one run of a job.
     * NOTE(review): the original read dto.getExecutorParam() into a local that
     * was never used; runJob is invoked with only the job id. The dead code is
     * removed here — confirm whether executorParam should be forwarded to the
     * trigger instead.
     */
    @PostMapping(value = "/trigger")
    @ApiOperation("触发任务")
    public ReturnT<String> triggerJob(@RequestBody TriggerJobDto dto) {
        try {
            JobTriggerPoolHelper jobTriggerPoolHelper = new JobTriggerPoolHelper();
            jobTriggerPoolHelper.runJob(dto.getJobId());
        } catch (Exception e) {
            // any trigger failure is reported uniformly as FAIL
            return ReturnT.FAIL;
        }
        return ReturnT.SUCCESS;
    }

    /**
     * Computes the next 5 fire times of a cron expression, formatted as
     * date-time strings; fewer when the expression stops matching.
     *
     * @param cron cron expression to evaluate
     * @return up to 5 formatted fire times, or FAIL_CODE when the cron is invalid
     */
    @GetMapping("/nextTriggerTime")
    @ApiOperation("获取近5次触发时间")
    public ReturnT<List<String>> nextTriggerTime(String cron) {
        List<String> result = new ArrayList<>();
        try {
            CronExpression cronExpression = new CronExpression(cron);
            Date lastTime = new Date();
            for (int i = 0; i < 5; i++) {
                lastTime = cronExpression.getNextValidTimeAfter(lastTime);
                if (lastTime == null) {
                    break; // no further valid fire time
                }
                result.add(DateUtil.formatDateTime(lastTime));
            }
        } catch (ParseException e) {
            return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("jobinfo_field_cron_invalid"));
        }
        return new ReturnT<>(result);
    }

    /**
     * Batch-creates jobs from a template; the template id is mandatory
     * (0 is treated as "not chosen").
     */
    @PostMapping("/batchAdd")
    @ApiOperation("批量创建任务")
    public ReturnT<String> batchAdd(@RequestBody FlinkXBatchJsonBuildDto dto) throws IOException {
        if (dto.getTemplateId() == 0) {
            return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_choose") + I18nUtil.getString("jobinfo_field_temp")));
        }
        return jobService.batchAdd(dto);
    }
}

View File

@@ -0,0 +1,194 @@
package com.platform.admin.controller;
import com.platform.admin.entity.JobLog;
import com.platform.admin.mapper.JobInfoMapper;
import com.platform.admin.mapper.JobLogMapper;
import com.platform.core.biz.ExecutorBiz;
import com.platform.core.biz.model.LogResult;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.util.DateUtil;
import com.platform.admin.core.kill.KillJob;
import com.platform.admin.core.scheduler.JobScheduler;
import com.platform.admin.core.util.I18nUtil;
import com.platform.admin.entity.JobInfo;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.*;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
*
* @author AllDataDC
* @date 2023/3/26 11:14
* 任务运行日志接口
**/
@RestController
@RequestMapping("/log")
@Api(tags = "任务运行日志接口")
public class JobLogController {
    private static Logger logger = LoggerFactory.getLogger(JobLogController.class);

    @Resource
    public JobInfoMapper jobInfoMapper;
    @Resource
    public JobLogMapper jobLogMapper;

    /**
     * Pages over run logs filtered by group, job, status and an optional
     * "&lt;start&gt; - &lt;end&gt;" trigger-time range.
     */
    @GetMapping("/pageList")
    @ApiOperation("运行日志列表")
    public ReturnT<Map<String, Object>> pageList(
            @RequestParam(value = "current", required = false, defaultValue = "0") int current,
            @RequestParam(value = "size", required = false, defaultValue = "10") int size,
            @RequestParam(value = "jobGroup") int jobGroup, @RequestParam(value = "jobId") int jobId,
            @RequestParam(value = "logStatus") int logStatus, @RequestParam(value = "filterTime") String filterTime) {
        // filterTime is "<start> - <end>"; anything else means no time filter
        Date triggerTimeStart = null;
        Date triggerTimeEnd = null;
        if (filterTime != null && filterTime.trim().length() > 0) {
            String[] temp = filterTime.split(" - ");
            if (temp.length == 2) {
                triggerTimeStart = DateUtil.parseDateTime(temp[0]);
                triggerTimeEnd = DateUtil.parseDateTime(temp[1]);
            }
        }
        // page query
        List<JobLog> data = jobLogMapper.pageList((current - 1) * size, size, jobGroup, jobId, triggerTimeStart, triggerTimeEnd, logStatus);
        int cnt = jobLogMapper.pageListCount((current - 1) * size, size, jobGroup, jobId, triggerTimeStart, triggerTimeEnd, logStatus);
        Map<String, Object> maps = new HashMap<>();
        maps.put("recordsTotal", cnt);    // total record count
        maps.put("recordsFiltered", cnt); // record count after filtering
        maps.put("data", data);           // current page
        return new ReturnT<>(maps);
    }

    /**
     * Reads a local log file and returns its full contents as a LogResult.
     * SECURITY NOTE(review): executorAddress is taken from the request and
     * used directly as a file path — normalize it against an allowed base
     * directory to prevent path traversal before exposing this endpoint.
     */
    @RequestMapping(value = "/logDetailCat", method = RequestMethod.GET)
    @ApiOperation("运行日志详情")
    public ReturnT<LogResult> logDetailCat(HttpServletRequest request, String executorAddress) {
        // try-with-resources closes both streams even when read() throws;
        // the original only closed them on the success path.
        try (InputStream in = new FileInputStream(executorAddress);
             ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
            byte[] buf = new byte[1024];
            int len;
            while ((len = in.read(buf)) != -1) {
                bos.write(buf, 0, len);
            }
            // decode explicitly as UTF-8 instead of the platform default charset
            String logContent = bos.toString("UTF-8");
            ReturnT<LogResult> returnT = new ReturnT<>(ReturnT.SUCCESS_CODE, "查询日志成功");
            returnT.setContent(new LogResult(0, 0, logContent, true));
            return returnT;
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            return new ReturnT<>(ReturnT.FAIL_CODE, e.getMessage());
        }
    }

    /**
     * Requests the kill of a triggered run. The remote-executor kill path
     * (JobScheduler.getExecutorBiz(...).kill(...)) is still disabled upstream;
     * this now reports an explicit failure instead of returning null, which
     * made callers NPE when reading the result.
     */
    @RequestMapping(value = "/logKill", method = RequestMethod.POST)
    @ApiOperation("kill任务")
    public ReturnT<String> logKill(int id) {
        JobLog log = jobLogMapper.load(id);
        if (log == null) {
            // unknown log id: the original dereferenced null here
            return new ReturnT<>(500, I18nUtil.getString("jobinfo_glue_jobid_invalid"));
        }
        JobInfo jobInfo = jobInfoMapper.loadById(log.getJobId());
        if (jobInfo == null) {
            return new ReturnT<>(500, I18nUtil.getString("jobinfo_glue_jobid_invalid"));
        }
        if (ReturnT.SUCCESS_CODE != log.getTriggerCode()) {
            return new ReturnT<>(500, I18nUtil.getString("joblog_kill_log_limit"));
        }
        // TODO(review): re-enable the ExecutorBiz kill RPC once supported.
        return new ReturnT<>(500, "job kill over executor RPC is not implemented");
    }

    /**
     * Clears run logs according to a retention policy selected by {@code type}:
     * 1-4 clear by age (1/3/6 months, 1 year), 5-8 keep only the newest
     * 1 000/10 000/30 000/100 000 rows, 9 clears everything.
     */
    @PostMapping("/clearLog")
    @ApiOperation("清理日志")
    public ReturnT<String> clearLog(int jobGroup, int jobId, int type) {
        Date clearBeforeTime = null;
        int clearBeforeNum = 0;
        switch (type) {
            case 1: clearBeforeTime = DateUtil.addMonths(new Date(), -1); break; // older than 1 month
            case 2: clearBeforeTime = DateUtil.addMonths(new Date(), -3); break; // older than 3 months
            case 3: clearBeforeTime = DateUtil.addMonths(new Date(), -6); break; // older than 6 months
            case 4: clearBeforeTime = DateUtil.addYears(new Date(), -1); break;  // older than 1 year
            case 5: clearBeforeNum = 1000; break;   // keep newest 1 000
            case 6: clearBeforeNum = 10000; break;  // keep newest 10 000
            case 7: clearBeforeNum = 30000; break;  // keep newest 30 000
            case 8: clearBeforeNum = 100000; break; // keep newest 100 000
            case 9: clearBeforeNum = 0; break;      // clear everything
            default:
                return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("joblog_clean_type_invalid"));
        }
        // delete in batches of 1000 until nothing matches
        List<Long> logIds;
        do {
            logIds = jobLogMapper.findClearLogIds(jobGroup, jobId, clearBeforeTime, clearBeforeNum, 1000);
            if (logIds != null && !logIds.isEmpty()) {
                jobLogMapper.clearLog(logIds);
            }
        } while (logIds != null && !logIds.isEmpty());
        return ReturnT.SUCCESS;
    }

    /**
     * Kills the OS process recorded on a running job-log entry by delegating
     * to {@link KillJob#trigger}.
     */
    @ApiOperation("停止该job作业")
    @PostMapping("/killJob")
    public ReturnT<String> killJob(@RequestBody JobLog log) {
        String processId = log.getProcessId();
        return KillJob.trigger(processId);
    }
}

View File

@@ -0,0 +1,115 @@
package com.platform.admin.controller;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.api.R;
import com.platform.admin.base.BaseController;
import com.platform.admin.service.JobProjectService;
import com.platform.admin.entity.JobProject;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletRequest;
import java.io.Serializable;
import java.util.Date;
import java.util.List;
/**
*
* @author AllDataDC
* @date 2023/3/26 11:14
* 项目管理模块
**/
@RestController
@RequestMapping("/jobProject")
@Api(tags = "项目管理模块")
public class JobProjectController extends BaseController {

    @Autowired
    private JobProjectService jobProjectService;

    /**
     * Pages over projects, optionally filtered by a search term.
     *
     * @param searchVal optional search keyword
     * @param pageSize  page size
     * @param pageNo    1-based page number
     * @return one page of projects
     */
    @GetMapping
    @ApiOperation("分页查询所有数据")
    public R<IPage<JobProject>> selectAll(@RequestParam(value = "searchVal", required = false) String searchVal,
                                          @RequestParam("pageSize") Integer pageSize,
                                          @RequestParam("pageNo") Integer pageNo) {
        return success(jobProjectService.getProjectListPaging(pageSize, pageNo, searchVal));
    }

    /**
     * Lists all active projects (flag = true), no paging.
     */
    @ApiOperation("获取所有数据")
    @GetMapping("/list")
    public R<List<JobProject>> selectList() {
        // typed wrapper instead of the raw QueryWrapper the original used
        QueryWrapper<JobProject> query = new QueryWrapper<>();
        query.eq("flag", true);
        return success(jobProjectService.list(query));
    }

    /**
     * Loads one project by primary key.
     *
     * @param id 主键
     * @return the row, or a success wrapper around null when not found
     */
    @ApiOperation("通过主键查询单条数据")
    @GetMapping("{id}")
    public R<JobProject> selectOne(@PathVariable Serializable id) {
        return success(this.jobProjectService.getById(id));
    }

    /**
     * Creates a project, stamped with the current user and creation time.
     *
     * @param entity 实体对象
     * @return whether the insert succeeded
     */
    @ApiOperation("新增数据")
    @PostMapping
    public R<Boolean> insert(HttpServletRequest request, @RequestBody JobProject entity) {
        entity.setUserId(getCurrentUserId(request));
        entity.setCreateTime(new Date());
        return success(this.jobProjectService.save(entity));
    }

    /**
     * Updates a project's name/description and refreshes its update time.
     *
     * @param entity carries the id plus the new name/description
     * @return whether the update succeeded; false when the id is unknown
     */
    @PutMapping
    @ApiOperation("修改数据")
    public R<Boolean> update(@RequestBody JobProject entity) {
        JobProject project = jobProjectService.getById(entity.getId());
        if (project == null) {
            // unknown id: nothing to update (the original NPE'd here)
            return success(Boolean.FALSE);
        }
        project.setName(entity.getName());
        project.setDescription(entity.getDescription());
        project.setUpdateTime(new Date());
        // BUG FIX: the original called updateById(entity), discarding the
        // modified `project` (so updateTime was never persisted); the loaded
        // and updated row is the one that must be saved.
        return success(this.jobProjectService.updateById(project));
    }

    /**
     * Deletes the projects with the given ids.
     *
     * @param idList 主键结合
     * @return whether the delete succeeded
     */
    @DeleteMapping
    @ApiOperation("删除数据")
    public R<Boolean> delete(@RequestParam("idList") List<Long> idList) {
        return success(this.jobProjectService.removeByIds(idList));
    }
}

View File

@@ -0,0 +1,95 @@
package com.platform.admin.controller;
import cn.hutool.core.util.StrUtil;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.api.R;
import com.platform.admin.base.BaseController;
import com.platform.admin.base.BaseForm;
import com.platform.admin.service.JobRegistryService;
import com.platform.admin.entity.JobRegistry;
import com.platform.admin.util.PageUtils;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.Map;
/**
*
* @author AllDataDC
* @date 2023/3/26 11:14
* 执行器资源监控
**/
@RestController
@RequestMapping("/jobRegistry")
@Api(tags = "执行器资源监控")
public class JobRegistryController extends BaseController {
    @Autowired
    private JobRegistryService jobRegistryService;
    /**
     * Pages over all executor registry rows.
     * Paging, sorting, and column-filter parameters are read from the current
     * request via BaseForm; the filter conditions are assembled by
     * {@link #pageQueryWrapperCustom(Map)}.
     *
     * @return one page of JobRegistry rows
     */
    @GetMapping
    @ApiOperation("分页查询所有数据")
    @ApiImplicitParams(
            {@ApiImplicitParam(paramType = "query", dataType = "String", name = "current", value = "当前页", defaultValue = "1", required = true),
                    @ApiImplicitParam(paramType = "query", dataType = "String", name = "size", value = "一页大小", defaultValue = "10", required = true),
                    @ApiImplicitParam(paramType = "query", dataType = "Boolean", name = "ifCount", value = "是否查询总数", defaultValue = "true"),
                    @ApiImplicitParam(paramType = "query", dataType = "String", name = "ascs", value = "升序字段,多个用逗号分隔"),
                    @ApiImplicitParam(paramType = "query", dataType = "String", name = "descs", value = "降序字段,多个用逗号分隔")
            })
    public R<IPage<JobRegistry>> selectAll() {
        BaseForm baseForm = new BaseForm();
        return success(this.jobRegistryService.page(baseForm.getPlusPagingQueryEntity(), pageQueryWrapperCustom(baseForm.getParameters())));
    }
    /**
     * Builds a QueryWrapper from the raw request-parameter map:
     * "ascs"/"descs" entries become ORDER BY clauses and every remaining
     * non-empty parameter becomes a column condition — LIKE for
     * "datasourceName", exact equality for everything else. Camel-case
     * parameter names are converted to underscore column names via
     * StrUtil.toUnderlineCase.
     *
     * @param map raw request parameters (paging, sorting, and column filters mixed)
     * @return the assembled wrapper, ready for a MyBatis-Plus page query
     */
    protected QueryWrapper<JobRegistry> pageQueryWrapperCustom(Map<String, Object> map) {
        // MyBatis-Plus paging/sorting parameters (e.g. ascs/descs)
        Map<String, Object> pageHelperParams = PageUtils.filterPageParams(map);
        // remaining non-empty parameters used as column filters
        Map<String, Object> columnQueryMap = PageUtils.filterColumnQueryParams(map);
        QueryWrapper<JobRegistry> queryWrapper = new QueryWrapper<>();
        // apply ordering; keys other than ascs/descs are ignored here
        pageHelperParams.forEach((k, v) -> {
            switch (k) {
                case "ascs":
                    queryWrapper.orderByAsc(StrUtil.toUnderlineCase(StrUtil.toString(v)));
                    break;
                case "descs":
                    queryWrapper.orderByDesc(StrUtil.toUnderlineCase(StrUtil.toString(v)));
                    break;
            }
        });
        // assemble per-column conditions: datasourceName uses LIKE, all others exact match
        columnQueryMap.forEach((k, v) -> {
            switch (k) {
                case "datasourceName":
                    queryWrapper.like(StrUtil.toUnderlineCase(k), v);
                    break;
                default:
                    queryWrapper.eq(StrUtil.toUnderlineCase(k), v);
            }
        });
        return queryWrapper;
    }
}

View File

@@ -0,0 +1,89 @@
package com.platform.admin.controller;
import com.platform.admin.base.BaseController;
import com.platform.admin.entity.JobTemplate;
import com.platform.admin.service.JobTemplateService;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.util.DateUtil;
import com.platform.admin.core.cron.CronExpression;
import com.platform.admin.core.util.I18nUtil;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.web.bind.annotation.*;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
 * Job template management endpoints.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 **/
@RestController
@RequestMapping("/jobTemplate")
@Api(tags = "任务配置接口")
public class JobTemplateController extends BaseController {

    @Resource
    private JobTemplateService jobTemplateService;

    /**
     * Paged list of job templates.
     *
     * @param current 1-based page number
     * @param size    page size
     * @return map with paging metadata and the template rows
     */
    @GetMapping("/pageList")
    @ApiOperation("任务模板列表")
    public ReturnT<Map<String, Object>> pageList(@RequestParam(value = "current", required = false, defaultValue = "1") int current,
                                                 @RequestParam(value = "size", required = false, defaultValue = "10") int size,
                                                 @RequestParam(value = "jobGroup") int jobGroup, @RequestParam(value = "jobDesc") String jobDesc,
                                                 @RequestParam(value = "executorHandler") String executorHandler,
                                                 @RequestParam(value = "userId") int userId, @RequestParam(value = "projectIds", required = false) Integer[] projectIds) {
        // BUG FIX: defaultValue was "0", which made the offset (current - 1) * size negative
        // whenever the parameter was omitted; "1" matches the other paged endpoints.
        return new ReturnT<>(jobTemplateService.pageList((current - 1) * size, size, jobGroup, jobDesc, executorHandler, userId, projectIds));
    }

    /** Creates a job template; the owner is set from the authenticated request. */
    @PostMapping("/add")
    @ApiOperation("添加任务模板")
    public ReturnT<String> add(HttpServletRequest request, @RequestBody JobTemplate jobTemplate) {
        jobTemplate.setUserId(getCurrentUserId(request));
        return jobTemplateService.add(jobTemplate);
    }

    /** Updates a job template; the owner is refreshed from the authenticated request. */
    @PostMapping("/update")
    @ApiOperation("更新任务")
    public ReturnT<String> update(HttpServletRequest request, @RequestBody JobTemplate jobTemplate) {
        jobTemplate.setUserId(getCurrentUserId(request));
        return jobTemplateService.update(jobTemplate);
    }

    /** Deletes the template with the given id. */
    @PostMapping(value = "/remove/{id}")
    @ApiOperation("移除任务模板")
    public ReturnT<String> remove(@PathVariable(value = "id") int id) {
        return jobTemplateService.remove(id);
    }

    /**
     * Computes the next five fire times of a cron expression, starting from now.
     *
     * @param cron the cron expression to evaluate
     * @return formatted fire times (fewer than five if the expression expires first),
     *         or a FAIL result when the expression cannot be parsed
     */
    @GetMapping("/nextTriggerTime")
    @ApiOperation("获取近5次触发时间")
    public ReturnT<List<String>> nextTriggerTime(String cron) {
        List<String> result = new ArrayList<>();
        try {
            CronExpression cronExpression = new CronExpression(cron);
            Date lastTime = new Date();
            for (int i = 0; i < 5; i++) {
                lastTime = cronExpression.getNextValidTimeAfter(lastTime);
                if (lastTime != null) {
                    result.add(DateUtil.formatDateTime(lastTime));
                } else {
                    // Expression has no further fire times; stop early.
                    break;
                }
            }
        } catch (ParseException e) {
            return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("jobinfo_field_cron_invalid"));
        }
        return new ReturnT<>(result);
    }
}

View File

@@ -0,0 +1,103 @@
package com.platform.admin.controller;
import com.baomidou.mybatisplus.extension.api.R;
import com.platform.admin.base.BaseController;
import com.platform.admin.service.DatasourceQueryService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
/**
 * Controller for querying datasource metadata (databases, schemas, tables, columns)
 * via {@link DatasourceQueryService}.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 **/
@RestController
@RequestMapping("/metadata")
@Api(tags = "jdbc数据库查询控制器")
public class MetadataController extends BaseController {

    @Autowired
    private DatasourceQueryService datasourceQueryService;

    /**
     * Lists Mongo database names for the given datasource id.
     *
     * @param datasourceId id of the registered datasource
     * @return database names
     */
    @GetMapping("/getDBs")
    @ApiOperation("根据数据源id获取mongo库名")
    public R<List<String>> getDBs(Long datasourceId) throws IOException {
        return success(datasourceQueryService.getDBs(datasourceId));
    }

    /**
     * Lists Mongo collection names for the given datasource id and database name.
     *
     * @param datasourceId id of the registered datasource
     * @param dbName       database to inspect
     * @return collection names
     */
    @GetMapping("/collectionNames")
    @ApiOperation("根据数据源id,dbname获取CollectionNames")
    public R<List<String>> getCollectionNames(Long datasourceId,String dbName) throws IOException {
        return success(datasourceQueryService.getCollectionNames(datasourceId,dbName));
    }

    /**
     * Lists table schemas (e.g. PostgreSQL schemas) for the given datasource id.
     *
     * @param datasourceId id of the registered datasource
     * @return schema names
     */
    @GetMapping("/getDBSchema")
    @ApiOperation("根据数据源id获取 db schema")
    public R<List<String>> getTableSchema(Long datasourceId) {
        return success(datasourceQueryService.getTableSchema(datasourceId));
    }

    /**
     * Lists available table names for the given datasource id (optionally within a schema).
     *
     * @param datasourceId id of the registered datasource
     * @param tableSchema  schema to restrict to; semantics when null depend on the service
     * @return table names
     */
    @GetMapping("/getTables")
    @ApiOperation("根据数据源id获取可用表名")
    public R<List<String>> getTableNames(Long datasourceId,String tableSchema) throws IOException {
        return success(datasourceQueryService.getTables(datasourceId,tableSchema));
    }

    /**
     * Lists all column names of a table.
     *
     * @param datasourceId id of the registered datasource
     * @param tableName    table to inspect
     * @return column names
     */
    @GetMapping("/getColumns")
    @ApiOperation("根据数据源id和表名获取所有字段")
    public R<List<String>> getColumns(Long datasourceId, String tableName) throws IOException {
        return success(datasourceQueryService.getColumns(datasourceId, tableName));
    }

    /**
     * Lists the columns produced by an arbitrary SQL query.
     *
     * @param datasourceId id of the registered datasource
     * @param querySql     SQL statement whose result columns are described
     * @return column names of the query result
     */
    @GetMapping("/getColumnsByQuerySql")
    @ApiOperation("根据数据源id和sql语句获取所有字段")
    public R<List<String>> getColumnsByQuerySql(Long datasourceId, String querySql) throws SQLException {
        return success(datasourceQueryService.getColumnsByQuerySql(datasourceId, querySql));
    }
}

View File

@@ -0,0 +1,152 @@
package com.platform.admin.controller;
import cn.hutool.core.util.StrUtil;
import com.platform.core.biz.model.ReturnT;
import com.platform.admin.mapper.JobUserMapper;
import com.platform.admin.core.util.I18nUtil;
import com.platform.admin.entity.JobUser;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.*;
import javax.annotation.Resource;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static com.platform.core.biz.model.ReturnT.FAIL_CODE;
/**
 * User management endpoints (list, create, update, delete, change password).
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 **/
@RestController
@RequestMapping("/user")
@Api(tags = "用户信息接口")
public class UserController {

    @Resource
    private JobUserMapper jobUserMapper;
    @Resource
    private BCryptPasswordEncoder bCryptPasswordEncoder;

    /**
     * Paged user listing.
     *
     * @param current  1-based page number
     * @param size     page size
     * @param username optional username filter
     * @return map with total count and the page rows
     */
    @GetMapping("/pageList")
    @ApiOperation("用户列表")
    public ReturnT<Map<String, Object>> pageList(@RequestParam(value = "current", required = false, defaultValue = "1") int current,
                                                 @RequestParam(value = "size", required = false, defaultValue = "10") int size,
                                                 @RequestParam(value = "username", required = false) String username) {
        // page list
        List<JobUser> list = jobUserMapper.pageList((current - 1) * size, size, username);
        int recordsTotal = jobUserMapper.pageListCount((current - 1) * size, size, username);
        // package result
        Map<String, Object> maps = new HashMap<>();
        maps.put("recordsTotal", recordsTotal); // 总记录数
        maps.put("recordsFiltered", recordsTotal); // 过滤后的总记录数
        maps.put("data", list); // 分页列表
        return new ReturnT<>(maps);
    }

    /** Full (non-paged) user listing, optionally filtered by username. */
    @GetMapping("/list")
    @ApiOperation("用户列表")
    public ReturnT<List<JobUser>> list(String username) {
        List<JobUser> list = jobUserMapper.findAll(username);
        return new ReturnT<>(list);
    }

    /** Loads a single user by primary key. */
    @GetMapping("/getUserById")
    @ApiOperation(value = "根据id获取用户")
    public ReturnT<JobUser> selectById(@RequestParam("userId") Integer userId) {
        return new ReturnT<>(jobUserMapper.getUserById(userId));
    }

    /**
     * Creates a user. Username and password must be 4-20 characters; the password
     * is stored BCrypt-hashed and duplicate usernames are rejected.
     */
    @PostMapping("/add")
    @ApiOperation("添加用户")
    public ReturnT<String> add(@RequestBody JobUser jobUser) {
        // valid username
        if (!StringUtils.hasText(jobUser.getUsername())) {
            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_please_input") + I18nUtil.getString("user_username"));
        }
        jobUser.setUsername(jobUser.getUsername().trim());
        if (!(jobUser.getUsername().length() >= 4 && jobUser.getUsername().length() <= 20)) {
            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_length_limit") + "[4-20]");
        }
        // valid password
        if (!StringUtils.hasText(jobUser.getPassword())) {
            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_please_input") + I18nUtil.getString("user_password"));
        }
        jobUser.setPassword(jobUser.getPassword().trim());
        if (!(jobUser.getPassword().length() >= 4 && jobUser.getPassword().length() <= 20)) {
            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_length_limit") + "[4-20]");
        }
        jobUser.setPassword(bCryptPasswordEncoder.encode(jobUser.getPassword()));
        // check repeat
        JobUser existUser = jobUserMapper.loadByUserName(jobUser.getUsername());
        if (existUser != null) {
            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("user_username_repeat"));
        }
        // write
        jobUserMapper.save(jobUser);
        return ReturnT.SUCCESS;
    }

    /**
     * Updates a user. The password is mandatory here and is re-hashed;
     * blank or out-of-range passwords are rejected.
     */
    @PostMapping(value = "/update")
    @ApiOperation("更新用户信息")
    public ReturnT<String> update(@RequestBody JobUser jobUser) {
        if (StringUtils.hasText(jobUser.getPassword())) {
            String pwd = jobUser.getPassword().trim();
            if (StrUtil.isBlank(pwd)) {
                return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_no_blank") + "密码");
            }
            if (!(pwd.length() >= 4 && pwd.length() <= 20)) {
                return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_length_limit") + "[4-20]");
            }
            jobUser.setPassword(bCryptPasswordEncoder.encode(pwd));
        } else {
            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_no_blank") + "密码");
        }
        // write
        jobUserMapper.update(jobUser);
        return ReturnT.SUCCESS;
    }

    /** Deletes a user by id; FAIL when no row was removed. */
    @RequestMapping(value = "/remove", method = RequestMethod.POST)
    @ApiOperation("删除用户")
    public ReturnT<String> remove(int id) {
        int result = jobUserMapper.delete(id);
        return result != 1 ? ReturnT.FAIL : ReturnT.SUCCESS;
    }

    /**
     * Changes the password of the user identified by {@code jobUser.username}.
     * The new password must be 4-20 characters.
     */
    @PostMapping(value = "/updatePwd")
    @ApiOperation("修改密码")
    public ReturnT<String> updatePwd(@RequestBody JobUser jobUser) {
        String password = jobUser.getPassword();
        if (password == null || password.trim().length() == 0) {
            return new ReturnT<>(ReturnT.FAIL.getCode(), "密码不可为空");
        }
        password = password.trim();
        if (!(password.length() >= 4 && password.length() <= 20)) {
            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_length_limit") + "[4-20]");
        }
        // BUG FIX: loadByUserName may return null for an unknown username;
        // previously this NPE'd on existUser.setPassword(...).
        JobUser existUser = jobUserMapper.loadByUserName(jobUser.getUsername());
        if (existUser == null) {
            return new ReturnT<>(FAIL_CODE, "用户不存在");
        }
        existUser.setPassword(bCryptPasswordEncoder.encode(password));
        jobUserMapper.update(existUser);
        return ReturnT.SUCCESS;
    }
}

View File

@@ -0,0 +1,143 @@
package com.platform.admin.core.conf;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
@Component
// NOTE(review): class name is misspelled ("Excecutor" -> "Executor"); renaming would
// break callers of getExcecutorConfig(), so it is kept as-is and only documented here.
public class ExcecutorConfig implements InitializingBean, DisposableBean {

    // Static self-reference so non-Spring code can read executor configuration;
    // populated once afterPropertiesSet() runs.
    private static ExcecutorConfig excecutorConfig = null;

    /** Returns the Spring-managed singleton (null before context initialization). */
    public static ExcecutorConfig getExcecutorConfig() {
        return excecutorConfig;
    }

    @Override
    public void afterPropertiesSet() throws Exception {
        // Capture this bean instance for static access.
        excecutorConfig = this;
    }

    @Override
    public void destroy() throws Exception {
    }

    // chunjun (flinkx) executor locations -- field names kept from the older "flinkx" naming.
    @Value("${dts.executor.chunjunHome}")
    private String flinkxHome;
    @Value("${dts.executor.chunjunjsonPath}")
    private String flinkxjsonPath;
    @Value("${dts.executor.chunjunlogHome}")
    private String flinkxlogHome;

    // datax executor locations.
    @Value("${dts.executor.dataxHome}")
    private String dataxHome;
    @Value("${dts.executor.dataxjsonPath}")
    private String dataxjsonPath;
    @Value("${dts.executor.dataxlogHome}")
    private String dataxlogHome;

    // dts MySQL connection settings.
    @Value("${common.mysql.dts.url}")
    private String url;
    @Value("${common.mysql.dts.driver-class-name}")
    private String driverClassname;
    @Value("${common.mysql.dts.username}")
    private String username;
    @Value("${common.mysql.dts.password}")
    private String password;

    public static void setExcecutorConfig(ExcecutorConfig excecutorConfig) {
        ExcecutorConfig.excecutorConfig = excecutorConfig;
    }

    public String getFlinkxHome() {
        return flinkxHome;
    }

    public void setFlinkxHome(String flinkxHome) {
        this.flinkxHome = flinkxHome;
    }

    public String getFlinkxjsonPath() {
        return flinkxjsonPath;
    }

    public void setFlinkxjsonPath(String flinkxjsonPath) {
        this.flinkxjsonPath = flinkxjsonPath;
    }

    public String getFlinkxlogHome() {
        return flinkxlogHome;
    }

    public void setFlinkxlogHome(String flinkxlogHome) {
        this.flinkxlogHome = flinkxlogHome;
    }

    public String getDataxHome() {
        return dataxHome;
    }

    public void setDataxHome(String dataxHome) {
        this.dataxHome = dataxHome;
    }

    public String getDataxjsonPath() {
        return dataxjsonPath;
    }

    public void setDataxjsonPath(String dataxjsonPath) {
        this.dataxjsonPath = dataxjsonPath;
    }

    public String getDataxlogHome() {
        return dataxlogHome;
    }

    public void setDataxlogHome(String dataxlogHome) {
        this.dataxlogHome = dataxlogHome;
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public String getDriverClassname() {
        return driverClassname;
    }

    public void setDriverClassname(String driverClassname) {
        this.driverClassname = driverClassname;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }
}

View File

@@ -0,0 +1,161 @@
package com.platform.admin.core.conf;
import com.platform.admin.mapper.*;
import com.platform.admin.core.scheduler.JobScheduler;
import com.platform.admin.mapper.JobDatasourceMapper;
import com.platform.admin.mapper.JobGroupMapper;
import com.platform.admin.mapper.JobInfoMapper;
import com.platform.admin.mapper.JobLogMapper;
import com.platform.admin.mapper.JobLogReportMapper;
import com.platform.admin.mapper.JobRegistryMapper;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.mail.javamail.JavaMailSender;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import javax.sql.DataSource;
/**
 * Central admin configuration bean (xxl-job style): exposes configuration values and
 * mapper/mail/datasource dependencies through a static singleton, and drives the
 * {@link JobScheduler} lifecycle.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 **/
@Component
public class JobAdminConfig implements InitializingBean, DisposableBean {

    // Static self-reference so scheduler threads can reach config without DI;
    // set in afterPropertiesSet() BEFORE the scheduler starts, which may rely on it.
    private static JobAdminConfig adminConfig = null;

    /** Returns the Spring-managed singleton (null before context initialization). */
    public static JobAdminConfig getAdminConfig() {
        return adminConfig;
    }

    // ---------------------- XxlJobScheduler ----------------------
    private JobScheduler xxlJobScheduler;

    @Override
    public void afterPropertiesSet() throws Exception {
        // Order matters: publish the static reference first, then start the scheduler.
        adminConfig = this;
        xxlJobScheduler = new JobScheduler();
        xxlJobScheduler.init();
    }

    @Override
    public void destroy() throws Exception {
        xxlJobScheduler.destroy();
    }

    // ---------------------- XxlJobScheduler ----------------------

    // conf
    @Value("${dts.job.i18n}")
    private String i18n;
    @Value("${dts.job.accessToken}")
    private String accessToken;
    @Value("${spring.mail.username}")
    private String emailUserName;
    @Value("${dts.job.triggerpool.fast.max}")
    private int triggerPoolFastMax;
    @Value("${dts.job.triggerpool.slow.max}")
    private int triggerPoolSlowMax;
    @Value("${dts.job.logretentiondays}")
    private int logretentiondays;
    @Value("${datasource.aes.key}")
    private String dataSourceAESKey;

    // dao, service
    @Resource
    private JobLogMapper jobLogMapper;
    @Resource
    private JobInfoMapper jobInfoMapper;
    @Resource
    private JobRegistryMapper jobRegistryMapper;
    @Resource
    private JobGroupMapper jobGroupMapper;
    @Resource
    private JobLogReportMapper jobLogReportMapper;
    @Resource
    private JavaMailSender mailSender;
    @Resource
    private DataSource dataSource;
    @Resource
    private JobDatasourceMapper jobDatasourceMapper;

    public String getI18n() {
        return i18n;
    }

    public String getAccessToken() {
        return accessToken;
    }

    public String getEmailUserName() {
        return emailUserName;
    }

    /** Fast trigger pool size, clamped to a minimum of 200. */
    public int getTriggerPoolFastMax() {
        return triggerPoolFastMax < 200 ? 200 : triggerPoolFastMax;
    }

    /** Slow trigger pool size, clamped to a minimum of 100. */
    public int getTriggerPoolSlowMax() {
        return triggerPoolSlowMax < 100 ? 100 : triggerPoolSlowMax;
    }

    /** Log retention days; values under 7 are mapped to -1 (retention disabled). */
    public int getLogretentiondays() {
        return logretentiondays < 7 ? -1 : logretentiondays;
    }

    public JobLogMapper getJobLogMapper() {
        return jobLogMapper;
    }

    public JobInfoMapper getJobInfoMapper() {
        return jobInfoMapper;
    }

    public JobRegistryMapper getJobRegistryMapper() {
        return jobRegistryMapper;
    }

    public JobGroupMapper getJobGroupMapper() {
        return jobGroupMapper;
    }

    public JobLogReportMapper getJobLogReportMapper() {
        return jobLogReportMapper;
    }

    public JavaMailSender getMailSender() {
        return mailSender;
    }

    public DataSource getDataSource() {
        return dataSource;
    }

    public JobDatasourceMapper getJobDatasourceMapper() {
        return jobDatasourceMapper;
    }

    public String getDataSourceAESKey() {
        return dataSourceAESKey;
    }

    public void setDataSourceAESKey(String dataSourceAESKey) {
        this.dataSourceAESKey = dataSourceAESKey;
    }
}

View File

@@ -0,0 +1,52 @@
package com.platform.admin.core.conf;
import com.platform.core.executor.impl.JobSpringExecutor;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
 * @ClassName XxlJobConfig
 * @Description: xxl-job executor dependency configuration
 * @author AllDataDC
 */
@Configuration
@Data
@Slf4j
public class XxlJobConfig {

    @Value("${xxl.job.admin.addresses}")
    private String adminAddresses;
    @Value("${xxl.job.executor.appname}")
    private String appname;
    @Value("${xxl.job.executor.port}")
    private int port;
    @Value("${xxl.job.executor.logpath}")
    private String logPath;
    @Value("${xxl.job.executor.logretentiondays}")
    private int logRetentionDays;

    /**
     * Builds the executor bean from the xxl.job.* properties.
     *
     * @return configured {@link JobSpringExecutor}
     */
    @Bean
    public JobSpringExecutor xxlJobExecutor() {
        // FIX: use the SLF4J logger provided by @Slf4j instead of System.out.
        log.info("=============== xxl-job config init.===============");
        JobSpringExecutor xxlJobSpringExecutor = new JobSpringExecutor();
        xxlJobSpringExecutor.setAdminAddresses(adminAddresses);
        xxlJobSpringExecutor.setAppName(appname);
        xxlJobSpringExecutor.setPort(port);
        xxlJobSpringExecutor.setLogPath(logPath);
        xxlJobSpringExecutor.setLogRetentionDays(logRetentionDays);
        return xxlJobSpringExecutor;
    }
}

View File

@@ -0,0 +1,51 @@
package com.platform.admin.core.handler;
import com.platform.admin.util.AESUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.ibatis.type.BaseTypeHandler;
import org.apache.ibatis.type.JdbcType;
import org.apache.ibatis.type.MappedTypes;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
 * MyBatis type handler that transparently AES-encrypts String columns on write
 * and decrypts them on read.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 **/
@MappedTypes({String.class})
public class AESEncryptHandler extends BaseTypeHandler<String> {

    @Override
    public void setNonNullParameter(PreparedStatement ps, int i, String parameter, JdbcType jdbcType) throws SQLException {
        // Blank values are written as SQL NULL; everything else is encrypted.
        String stored = StringUtils.isNotBlank(parameter) ? AESUtil.encrypt(parameter) : null;
        ps.setString(i, stored);
    }

    @Override
    public String getNullableResult(ResultSet rs, String columnName) throws SQLException {
        return AESUtil.decrypt(rs.getString(columnName));
    }

    @Override
    public String getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
        return AESUtil.decrypt(rs.getString(columnIndex));
    }

    @Override
    public String getNullableResult(CallableStatement cs, int columnIndex)
            throws SQLException {
        return AESUtil.decrypt(cs.getString(columnIndex));
    }
}

View File

@@ -0,0 +1,29 @@
package com.platform.admin.core.handler;
import com.platform.admin.core.thread.JobTriggerPoolHelper;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.core.handler.IJobHandler;
import com.platform.core.handler.annotation.JobHandler;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
/**
 * @ClassName DataxJobHandler
 * @Description: executorJobHandler -- entry point invoked by the scheduler to run a datax job
 * @author AllDataDC
 */
@Slf4j
@Component
@JobHandler("executorJobHandler")
public class DataxJobHandler extends IJobHandler {

    /**
     * Submits the job identified by the trigger parameter to the trigger pool.
     *
     * @param tgParam trigger payload carrying the job id
     * @return SUCCESS once the job has been handed off
     */
    @Override
    public ReturnT<String> execute(TriggerParam tgParam) throws Exception {
        log.info("---------Datax定时任务开始执行--------");
        // Hand the job off to the trigger pool for asynchronous execution.
        JobTriggerPoolHelper.runJob(tgParam.getJobId());
        // FIX: was System.out.println, inconsistent with the @Slf4j logger used above.
        log.info("---------Datax定时任务执行成功--------");
        return ReturnT.SUCCESS;
    }
}

View File

@@ -0,0 +1,37 @@
package com.platform.admin.core.handler;
import com.baomidou.mybatisplus.core.handlers.MetaObjectHandler;
import lombok.extern.slf4j.Slf4j;
import org.apache.ibatis.reflection.MetaObject;
import org.springframework.security.core.context.SecurityContext;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Component;
import java.util.Date;
/**
 * Auto-fills common audit fields (createBy/createDate, updateBy/updateDate)
 * on mybatis-plus inserts and updates.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 **/
@Component
@Slf4j
public class MybatisMetaObjectHandler implements MetaObjectHandler {

    @Override
    public void insertFill(MetaObject metaObject) {
        setFieldValByName("createDate", new Date(), metaObject);
        setFieldValByName("createBy", getCurrentUser(), metaObject);
    }

    @Override
    public void updateFill(MetaObject metaObject) {
        setFieldValByName("updateDate", new Date(), metaObject);
        setFieldValByName("updateBy", getCurrentUser(), metaObject);
    }

    /**
     * Resolves the current principal's name, or null when there is no
     * authentication in the security context (e.g. scheduler/background threads).
     * FIX: previously NPE'd on getAuthentication() returning null.
     */
    private String getCurrentUser() {
        if (SecurityContextHolder.getContext().getAuthentication() == null) {
            return null;
        }
        return SecurityContextHolder.getContext().getAuthentication().getPrincipal().toString();
    }
}

View File

@@ -0,0 +1,43 @@
package com.platform.admin.core.kill;
import com.platform.admin.core.thread.JobTriggerPoolHelper;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.core.enums.ExecutorBlockStrategyEnum;
import com.platform.core.glue.GlueTypeEnum;
import com.platform.admin.core.trigger.JobTrigger;
import com.platform.core.util.Constants;
import com.platform.core.util.ProcessUtil;
import java.util.Date;
/**
 * Kills the OS process that is running a job.
 */
public class KillJob {

    /**
     * Issues a platform-specific kill command for the given process id.
     *
     * @param processId pid of the process to terminate (must be a trusted value --
     *                  it is concatenated into a shell command)
     * @return SUCCESS when the kill command was issued, FAIL carrying the error
     *         message otherwise (previously the failure message was null)
     */
    public static ReturnT<String> trigger(String processId) {
        try {
            // Pick the kill command for the current platform.
            String cmdstr;
            if (JobTriggerPoolHelper.isWindows()) {
                cmdstr = Constants.CMDWINDOWTASKKILL + processId;
            } else {
                cmdstr = Constants.CMDLINUXTASKKILL + processId;
            }
            // NOTE(review): command is built by string concatenation; processId comes from
            // internally tracked pids -- confirm it is never user-controlled.
            Runtime.getRuntime().exec(cmdstr);
            // (removed: unused ProcessUtil.getProcessId lookup of the kill process itself)
            return new ReturnT<>(ReturnT.SUCCESS_CODE, "成功停止作业 !!!");
        } catch (Exception e) {
            // FIX: propagate the failure reason instead of a null message.
            return new ReturnT<>(ReturnT.FAIL_CODE, e.getMessage());
        }
    }
}

View File

@@ -0,0 +1,55 @@
package com.platform.admin.core.route;
import com.platform.admin.core.route.strategy.*;
import com.platform.admin.core.route.strategy.ExecutorRouteBusyover;
import com.platform.admin.core.route.strategy.ExecutorRouteConsistentHash;
import com.platform.admin.core.route.strategy.ExecutorRouteFailover;
import com.platform.admin.core.route.strategy.ExecutorRouteFirst;
import com.platform.admin.core.route.strategy.ExecutorRouteLFU;
import com.platform.admin.core.route.strategy.ExecutorRouteLRU;
import com.platform.admin.core.route.strategy.ExecutorRouteLast;
import com.platform.admin.core.route.strategy.ExecutorRouteRandom;
import com.platform.admin.core.route.strategy.ExecutorRouteRound;
import com.platform.admin.core.util.I18nUtil;
/**
 * Routing strategies for picking an executor address; each constant carries a
 * localized title and its router implementation (null for sharding broadcast,
 * which is handled specially by the trigger).
 */
public enum ExecutorRouteStrategyEnum {

    FIRST(I18nUtil.getString("jobconf_route_first"), new ExecutorRouteFirst()),
    LAST(I18nUtil.getString("jobconf_route_last"), new ExecutorRouteLast()),
    ROUND(I18nUtil.getString("jobconf_route_round"), new ExecutorRouteRound()),
    RANDOM(I18nUtil.getString("jobconf_route_random"), new ExecutorRouteRandom()),
    CONSISTENT_HASH(I18nUtil.getString("jobconf_route_consistenthash"), new ExecutorRouteConsistentHash()),
    LEAST_FREQUENTLY_USED(I18nUtil.getString("jobconf_route_lfu"), new ExecutorRouteLFU()),
    LEAST_RECENTLY_USED(I18nUtil.getString("jobconf_route_lru"), new ExecutorRouteLRU()),
    FAILOVER(I18nUtil.getString("jobconf_route_failover"), new ExecutorRouteFailover()),
    BUSYOVER(I18nUtil.getString("jobconf_route_busyover"), new ExecutorRouteBusyover()),
    SHARDING_BROADCAST(I18nUtil.getString("jobconf_route_shard"), null);

    private final String title;
    private final ExecutorRouter router;

    ExecutorRouteStrategyEnum(String title, ExecutorRouter router) {
        this.title = title;
        this.router = router;
    }

    public String getTitle() {
        return title;
    }

    public ExecutorRouter getRouter() {
        return router;
    }

    /**
     * Resolves a strategy by its enum name, falling back to the supplied default
     * for null or unknown names.
     */
    public static ExecutorRouteStrategyEnum match(String name, ExecutorRouteStrategyEnum defaultItem) {
        if (name == null) {
            return defaultItem;
        }
        for (ExecutorRouteStrategyEnum candidate : values()) {
            if (candidate.name().equals(name)) {
                return candidate;
            }
        }
        return defaultItem;
    }
}

View File

@@ -0,0 +1,21 @@
package com.platform.admin.core.route;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
/**
 * Base class of all executor-routing strategies: given a trigger and the list of
 * registered executor addresses, picks the address to dispatch to.
 */
public abstract class ExecutorRouter {
    protected static Logger logger = LoggerFactory.getLogger(ExecutorRouter.class);

    /**
     * route address
     *
     * @param addressList candidate executor addresses (non-empty by convention --
     *                    TODO confirm callers guarantee this)
     * @return ReturnT.content=address
     */
    public abstract ReturnT<String> route(TriggerParam triggerParam, List<String> addressList);
}

View File

@@ -0,0 +1,43 @@
package com.platform.admin.core.route.strategy;
import com.platform.admin.core.route.ExecutorRouter;
import com.platform.admin.core.scheduler.JobScheduler;
import com.platform.admin.core.util.I18nUtil;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import java.util.List;
/**
 * Busy-over routing: probes each address with an idle-beat and returns the first
 * address that reports idle; fails with the accumulated probe log otherwise.
 */
public class ExecutorRouteBusyover extends ExecutorRouter {

    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        StringBuffer idleBeatResultSB = new StringBuffer();
        for (String address : addressList) {
            // beat
            ReturnT<String> idleBeatResult = null;
            try {
                // TODO: idle-beat RPC is currently disabled; re-enable when ExecutorBiz is wired up.
                // ExecutorBiz executorBiz = JobScheduler.getExecutorBiz(address);
                // idleBeatResult = executorBiz.idleBeat(triggerParam.getJobId());
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
                idleBeatResult = new ReturnT<String>(ReturnT.FAIL_CODE, "" + e);
            }
            // BUG FIX: with the RPC commented out, idleBeatResult stayed null and the
            // getCode()/getMsg() calls below threw NPE; treat a null probe as failure.
            if (idleBeatResult == null) {
                idleBeatResult = new ReturnT<String>(ReturnT.FAIL_CODE, "");
            }
            idleBeatResultSB.append((idleBeatResultSB.length() > 0) ? "<br><br>" : "")
                    .append(I18nUtil.getString("jobconf_idleBeat") + "")
                    .append("<br>address").append(address)
                    .append("<br>code").append(idleBeatResult.getCode())
                    .append("<br>msg").append(idleBeatResult.getMsg());
            // beat success
            if (idleBeatResult.getCode() == ReturnT.SUCCESS_CODE) {
                idleBeatResult.setMsg(idleBeatResultSB.toString());
                idleBeatResult.setContent(address);
                return idleBeatResult;
            }
        }
        return new ReturnT<String>(ReturnT.FAIL_CODE, idleBeatResultSB.toString());
    }
}

View File

@@ -0,0 +1,84 @@
package com.platform.admin.core.route.strategy;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.admin.core.route.ExecutorRouter;
import java.io.UnsupportedEncodingException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.List;
import java.util.SortedMap;
import java.util.TreeMap;
/**
 * Consistent-hash routing: when a group's executors share the same address list,
 * different jobs are spread evenly across machines while each job always lands on
 * the same machine.
 * a. virtual nodes smooth out the distribution;
 * b. MD5 is used instead of String.hashCode, whose range is too small and collides.
 */
public class ExecutorRouteConsistentHash extends ExecutorRouter {

    // FIX: was a mutable static; it is never reassigned, so make it a constant.
    private static final int VIRTUAL_NODE_NUM = 100;

    /**
     * Hashes a key onto the 2^32 ring via MD5 (low 4 digest bytes, little-endian,
     * truncated to an unsigned 32-bit value).
     *
     * @param key value to place on the ring
     * @return position in [0, 2^32)
     */
    private static long hash(String key) {
        // md5 byte
        MessageDigest md5;
        try {
            md5 = MessageDigest.getInstance("MD5");
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException("MD5 not supported", e);
        }
        md5.reset();
        byte[] keyBytes = null;
        try {
            keyBytes = key.getBytes("UTF-8");
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException("Unknown string :" + key, e);
        }
        md5.update(keyBytes);
        byte[] digest = md5.digest();
        // hash code, Truncate to 32-bits
        long hashCode = ((long) (digest[3] & 0xFF) << 24)
                | ((long) (digest[2] & 0xFF) << 16)
                | ((long) (digest[1] & 0xFF) << 8)
                | (digest[0] & 0xFF);
        long truncateHashCode = hashCode & 0xffffffffL;
        return truncateHashCode;
    }

    /**
     * Maps a job onto the address ring and returns the first address at or after
     * its hash (wrapping around to the ring start).
     */
    public String hashJob(int jobId, List<String> addressList) {
        // ------A1------A2-------A3------
        // -----------J1------------------
        TreeMap<Long, String> addressRing = new TreeMap<Long, String>();
        for (String address : addressList) {
            for (int i = 0; i < VIRTUAL_NODE_NUM; i++) {
                long addressHash = hash("SHARD-" + address + "-NODE-" + i);
                addressRing.put(addressHash, address);
            }
        }
        long jobHash = hash(String.valueOf(jobId));
        SortedMap<Long, String> lastRing = addressRing.tailMap(jobHash);
        if (!lastRing.isEmpty()) {
            return lastRing.get(lastRing.firstKey());
        }
        // Wrapped past the end of the ring: take the first node.
        return addressRing.firstEntry().getValue();
    }

    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        String address = hashJob(triggerParam.getJobId(), addressList);
        return new ReturnT<String>(address);
    }
}

View File

@@ -0,0 +1,44 @@
package com.platform.admin.core.route.strategy;
import com.platform.admin.core.route.ExecutorRouter;
import com.platform.admin.core.scheduler.JobScheduler;
import com.platform.admin.core.util.I18nUtil;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import java.util.List;
/**
 * Failover routing: heartbeats each address in order and returns the first one
 * that answers; fails with the accumulated beat log otherwise.
 */
public class ExecutorRouteFailover extends ExecutorRouter {

    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        StringBuffer beatResultSB = new StringBuffer();
        for (String address : addressList) {
            // beat
            ReturnT<String> beatResult = null;
            try {
                // TODO: heartbeat RPC is currently disabled; re-enable when ExecutorBiz is wired up.
                // ExecutorBiz executorBiz = JobScheduler.getExecutorBiz(address);
                // beatResult = executorBiz.beat();
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
                beatResult = new ReturnT<String>(ReturnT.FAIL_CODE, "" + e);
            }
            // BUG FIX: with the RPC commented out, beatResult stayed null and the
            // getCode()/getMsg() calls below threw NPE; treat a null beat as failure.
            if (beatResult == null) {
                beatResult = new ReturnT<String>(ReturnT.FAIL_CODE, "");
            }
            beatResultSB.append((beatResultSB.length() > 0) ? "<br><br>" : "")
                    .append(I18nUtil.getString("jobconf_beat") + "")
                    .append("<br>address").append(address)
                    .append("<br>code").append(beatResult.getCode())
                    .append("<br>msg").append(beatResult.getMsg());
            // beat success
            if (beatResult.getCode() == ReturnT.SUCCESS_CODE) {
                beatResult.setMsg(beatResultSB.toString());
                beatResult.setContent(address);
                return beatResult;
            }
        }
        return new ReturnT<String>(ReturnT.FAIL_CODE, beatResultSB.toString());
    }
}

View File

@@ -0,0 +1,16 @@
package com.platform.admin.core.route.strategy;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.admin.core.route.ExecutorRouter;
import java.util.List;
/**
 * Simplest routing strategy: always dispatch to the first registered address.
 */
public class ExecutorRouteFirst extends ExecutorRouter {

    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        String firstAddress = addressList.get(0);
        return new ReturnT<String>(firstAddress);
    }
}

View File

@@ -0,0 +1,78 @@
package com.platform.admin.core.route.strategy;

import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.admin.core.route.ExecutorRouter;

import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

/**
 * LFU (Least Frequently Used) routing strategy: for each job, the executor
 * address that has been selected the fewest times is elected first.
 * (LRU, by contrast, elects by recency of use rather than frequency.)
 */
public class ExecutorRouteLFU extends ExecutorRouter {

    // per-job usage counters: jobId -> (address -> selection count)
    private static ConcurrentMap<Integer, HashMap<String, Integer>> jobLfuMap = new ConcurrentHashMap<Integer, HashMap<String, Integer>>();
    // epoch after which all counters are discarded (refreshed daily)
    private static long CACHE_VALID_TIME = 0;

    /**
     * Elect the least-frequently-used address for one job and bump its counter.
     *
     * @param jobId       job whose counters are consulted
     * @param addressList currently registered executor addresses (assumed non-empty)
     * @return the elected address
     */
    public String route(int jobId, List<String> addressList) {
        // cache clear: drop all counters once a day so stale state does not accumulate
        if (System.currentTimeMillis() > CACHE_VALID_TIME) {
            jobLfuMap.clear();
            CACHE_VALID_TIME = System.currentTimeMillis() + 1000*60*60*24;
        }

        // lfu item init
        HashMap<String, Integer> lfuItemMap = jobLfuMap.get(jobId);
        if (lfuItemMap == null) {
            lfuItemMap = new HashMap<String, Integer>();
            // FIX: honor the map another thread may have registered first; the
            // original ignored putIfAbsent's return value and could keep counting
            // in a local map that was never stored in the cache.
            HashMap<String, Integer> existing = jobLfuMap.putIfAbsent(jobId, lfuItemMap);
            if (existing != null) {
                lfuItemMap = existing;
            }
        }

        // put new addresses; seed with a small random count to spread first-hit
        // load, and re-seed once a counter exceeds the 1,000,000 cap
        Random random = new Random();
        for (String address : addressList) {
            if (!lfuItemMap.containsKey(address) || lfuItemMap.get(address) > 1000000) {
                lfuItemMap.put(address, random.nextInt(addressList.size()));
            }
        }

        // remove addresses that are no longer registered
        List<String> delKeys = new ArrayList<>();
        for (String existKey : lfuItemMap.keySet()) {
            if (!addressList.contains(existKey)) {
                delKeys.add(existKey);
            }
        }
        for (String delKey : delKeys) {
            lfuItemMap.remove(delKey);
        }

        // elect the address with the lowest count and increment it
        List<Map.Entry<String, Integer>> lfuItemList = new ArrayList<Map.Entry<String, Integer>>(lfuItemMap.entrySet());
        Collections.sort(lfuItemList, new Comparator<Map.Entry<String, Integer>>() {
            @Override
            public int compare(Map.Entry<String, Integer> o1, Map.Entry<String, Integer> o2) {
                return o1.getValue().compareTo(o2.getValue());
            }
        });
        Map.Entry<String, Integer> addressItem = lfuItemList.get(0);
        addressItem.setValue(addressItem.getValue() + 1);
        return addressItem.getKey();
    }

    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        String address = route(triggerParam.getJobId(), addressList);
        return new ReturnT<String>(address);
    }

}

View File

@@ -0,0 +1,75 @@
package com.platform.admin.core.route.strategy;

import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.admin.core.route.ExecutorRouter;

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

/**
 * LRU (Least Recently Used) routing strategy: for each job, the executor
 * address that has gone the longest without being selected is elected first.
 * (LFU, by contrast, elects by frequency of use rather than recency.)
 */
public class ExecutorRouteLRU extends ExecutorRouter {

    // per-job access-ordered address map: jobId -> (address -> address)
    private static ConcurrentMap<Integer, LinkedHashMap<String, String>> jobLRUMap = new ConcurrentHashMap<Integer, LinkedHashMap<String, String>>();
    // epoch after which all LRU state is discarded (refreshed daily)
    private static long CACHE_VALID_TIME = 0;

    /**
     * Elect the least-recently-used address for one job and mark it used.
     *
     * @param jobId       job whose LRU state is consulted
     * @param addressList currently registered executor addresses (assumed non-empty)
     * @return the elected address
     */
    public String route(int jobId, List<String> addressList) {
        // cache clear: drop all state once a day so stale addresses do not accumulate
        if (System.currentTimeMillis() > CACHE_VALID_TIME) {
            jobLRUMap.clear();
            CACHE_VALID_TIME = System.currentTimeMillis() + 1000*60*60*24;
        }

        // init lru
        LinkedHashMap<String, String> lruItem = jobLRUMap.get(jobId);
        if (lruItem == null) {
            // accessOrder=true: iteration order is least-recently-accessed first,
            // so the head entry is always the LRU candidate.
            lruItem = new LinkedHashMap<String, String>(16, 0.75f, true);
            // FIX: honor the map another thread may have registered first; the
            // original ignored putIfAbsent's return value and could keep using
            // a local map that was never stored in the cache.
            LinkedHashMap<String, String> existing = jobLRUMap.putIfAbsent(jobId, lruItem);
            if (existing != null) {
                lruItem = existing;
            }
        }

        // put new addresses
        for (String address : addressList) {
            if (!lruItem.containsKey(address)) {
                lruItem.put(address, address);
            }
        }

        // remove addresses that are no longer registered
        List<String> delKeys = new ArrayList<>();
        for (String existKey : lruItem.keySet()) {
            if (!addressList.contains(existKey)) {
                delKeys.add(existKey);
            }
        }
        for (String delKey : delKeys) {
            lruItem.remove(delKey);
        }

        // elect the eldest entry; the get() counts as an access and moves it to the
        // tail, making it the most-recently-used for the next round
        String eldestKey = lruItem.entrySet().iterator().next().getKey();
        return lruItem.get(eldestKey);
    }

    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        String address = route(triggerParam.getJobId(), addressList);
        return new ReturnT<String>(address);
    }

}

View File

@@ -0,0 +1,16 @@
package com.platform.admin.core.route.strategy;

import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.admin.core.route.ExecutorRouter;

import java.util.List;

/**
 * "Last" routing strategy: always elects the last registered executor address.
 */
public class ExecutorRouteLast extends ExecutorRouter {

    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        int lastIndex = addressList.size() - 1;
        return new ReturnT<String>(addressList.get(lastIndex));
    }

}

View File

@@ -0,0 +1,20 @@
package com.platform.admin.core.route.strategy;

import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.admin.core.route.ExecutorRouter;

import java.util.List;
import java.util.Random;

/**
 * Random routing strategy: elects a uniformly random executor address from the
 * registered list on every trigger.
 */
public class ExecutorRouteRandom extends ExecutorRouter {

    // shared RNG; java.util.Random is safe for concurrent use
    private static Random localRandom = new Random();

    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        int pick = localRandom.nextInt(addressList.size());
        return new ReturnT<String>(addressList.get(pick));
    }

}

View File

package com.platform.admin.core.route.strategy;

import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.admin.core.route.ExecutorRouter;

import java.util.List;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

/**
 * Round-robin routing strategy: each job cycles through its executor addresses
 * in turn, using a per-job counter taken modulo the address-list size.
 */
public class ExecutorRouteRound extends ExecutorRouter {

    // per-job trigger counter; cleared daily
    private static ConcurrentMap<Integer, Integer> routeCountEachJob = new ConcurrentHashMap<Integer, Integer>();
    // epoch after which all counters are discarded (refreshed daily)
    private static long CACHE_VALID_TIME = 0;

    /**
     * Atomically advance and return the round-robin counter for one job.
     * A fresh counter (or one past the 1,000,000 cap) is re-seeded with a small
     * random value to soften first-hit skew across admin restarts.
     */
    private static int count(int jobId) {
        // cache clear
        if (System.currentTimeMillis() > CACHE_VALID_TIME) {
            routeCountEachJob.clear();
            CACHE_VALID_TIME = System.currentTimeMillis() + 1000*60*60*24;
        }
        // FIX: the original get/put pair was not atomic and could lose increments
        // under concurrent triggers; compute() performs the update atomically.
        return routeCountEachJob.compute(jobId,
                (id, prev) -> (prev == null || prev > 1000000)
                        ? new Random().nextInt(100)
                        : prev + 1);
    }

    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        String address = addressList.get(count(triggerParam.getJobId()) % addressList.size());
        return new ReturnT<String>(address);
    }

}

View File

@@ -0,0 +1,67 @@
package com.platform.admin.core.scheduler;
import com.platform.admin.core.util.I18nUtil;
import com.platform.core.enums.ExecutorBlockStrategyEnum;
import com.platform.admin.core.thread.JobFailMonitorHelper;
import com.platform.admin.core.thread.JobLogReportHelper;
import com.platform.admin.core.thread.JobRegistryMonitorHelper;
import com.platform.admin.core.thread.JobScheduleHelper;
import com.platform.admin.core.thread.JobTriggerPoolHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Lifecycle coordinator for the admin scheduler. init() starts every background
 * helper (i18n, registry monitor, fail monitor, trigger pool, log report,
 * schedule loop) and destroy() stops them in the exact reverse order, so the
 * statement order in both methods is significant and must not be reshuffled.
 */
public class JobScheduler {
private static final Logger logger = LoggerFactory.getLogger(JobScheduler.class);
/**
 * Start all admin-side background helpers; called once at application startup.
 * The schedule loop is started last so every helper it depends on is already up.
 *
 * @throws Exception if any helper fails to start
 */
public void init() throws Exception {
// init i18n
initI18n();
// admin registry monitor run
JobRegistryMonitorHelper.getInstance().start();
// admin monitor run
JobFailMonitorHelper.getInstance().start();
// admin trigger pool start
JobTriggerPoolHelper.toStart();
// admin log report start
JobLogReportHelper.getInstance().start();
// start-schedule
JobScheduleHelper.getInstance().start();
logger.info(">>>>>>>>> init service-data-dts admin success.");
}
/**
 * Stop all helpers in reverse start order; called once at application shutdown.
 * The schedule loop is stopped first so no new triggers enter the pool while
 * the remaining helpers drain.
 *
 * @throws Exception if any helper fails to stop cleanly
 */
public void destroy() throws Exception {
// stop-schedule
JobScheduleHelper.getInstance().toStop();
// admin log report stop
JobLogReportHelper.getInstance().toStop();
// admin trigger pool stop
JobTriggerPoolHelper.toStop();
// admin monitor stop
JobFailMonitorHelper.getInstance().toStop();
// admin registry stop
JobRegistryMonitorHelper.getInstance().toStop();
}
// ---------------------- I18n ----------------------
// Localize the title of every block-strategy enum constant from the i18n bundle.
private void initI18n() {
for (ExecutorBlockStrategyEnum item : ExecutorBlockStrategyEnum.values()) {
item.setTitle(I18nUtil.getString("jobconf_block_".concat(item.name())));
}
}
}

View File

@@ -0,0 +1,207 @@
package com.platform.admin.core.thread;

import com.platform.admin.core.conf.JobAdminConfig;
import com.platform.admin.core.trigger.TriggerTypeEnum;
import com.platform.admin.core.util.I18nUtil;
import com.platform.admin.entity.JobGroup;
import com.platform.admin.entity.JobInfo;
import com.platform.admin.entity.JobLog;
import com.platform.core.biz.model.ReturnT;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.mail.javamail.MimeMessageHelper;

import javax.mail.internet.MimeMessage;
import java.text.MessageFormat;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;

/**
 * Background monitor for failed job executions (singleton). Every 10 seconds it
 * scans up to 1000 failed job logs, locks each one via a CAS on its alarm
 * status, schedules configured retries, and sends alarm emails.
 */
public class JobFailMonitorHelper {
    private static Logger logger = LoggerFactory.getLogger(JobFailMonitorHelper.class);

    private static JobFailMonitorHelper instance = new JobFailMonitorHelper();
    public static JobFailMonitorHelper getInstance(){
        return instance;
    }

    // ---------------------- monitor ----------------------

    private Thread monitorThread;
    private volatile boolean toStop = false;

    /** Start the daemon monitor thread; it loops until {@link #toStop()} is called. */
    public void start(){
        monitorThread = new Thread(new Runnable() {
            @Override
            public void run() {
                // monitor
                while (!toStop) {
                    try {
                        List<Long> failLogIds = JobAdminConfig.getAdminConfig().getJobLogMapper().findFailJobLogIds(1000);
                        if (failLogIds != null && !failLogIds.isEmpty()) {
                            for (long failLogId : failLogIds) {
                                // lock log: CAS alarm status 0 -> -1 so only one admin node
                                // processes this log; skip if another node won the race
                                int lockRet = JobAdminConfig.getAdminConfig().getJobLogMapper().updateAlarmStatus(failLogId, 0, -1);
                                if (lockRet < 1) {
                                    continue;
                                }
                                JobLog log = JobAdminConfig.getAdminConfig().getJobLogMapper().load(failLogId);
                                JobInfo info = JobAdminConfig.getAdminConfig().getJobInfoMapper().loadById(log.getJobId());

                                // 1. fail retry monitor: re-trigger with one fewer remaining retry
                                if (log.getExecutorFailRetryCount() > 0) {
                                    JobTriggerPoolHelper.trigger(log.getJobId(), TriggerTypeEnum.RETRY, (log.getExecutorFailRetryCount()-1), log.getExecutorShardingParam(), log.getExecutorParam());
                                    String retryMsg = "<br><br><span style=\"color:#F39C12;\" > >>>>>>>>>>>"+ I18nUtil.getString("jobconf_trigger_type_retry") +"<<<<<<<<<<< </span><br>";
                                    log.setTriggerMsg(log.getTriggerMsg() + retryMsg);
                                    JobAdminConfig.getAdminConfig().getJobLogMapper().updateTriggerInfo(log);
                                }

                                // 2. fail alarm monitor
                                // alarm status: 0=default, -1=locked, 1=no alarm needed,
                                // 2=alarm sent, 3=alarm failed
                                int newAlarmStatus = 0;
                                if (info != null && info.getAlarmEmail() != null && info.getAlarmEmail().trim().length() > 0) {
                                    boolean alarmResult = true;
                                    try {
                                        alarmResult = failAlarm(info, log);
                                    } catch (Exception e) {
                                        alarmResult = false;
                                        logger.error(e.getMessage(), e);
                                    }
                                    newAlarmStatus = alarmResult ? 2 : 3;
                                } else {
                                    newAlarmStatus = 1;
                                }
                                JobAdminConfig.getAdminConfig().getJobLogMapper().updateAlarmStatus(failLogId, -1, newAlarmStatus);
                            }
                        }
                    } catch (Exception e) {
                        if (!toStop) {
                            // FIX: the original used a MessageFormat-style "{0}" placeholder,
                            // which SLF4J never expands; pass the throwable as the final
                            // argument so the full stack trace is logged.
                            logger.error(">>>>>>>>>>> service-data-dts, job fail monitor thread error:", e);
                        }
                    }
                    try {
                        TimeUnit.SECONDS.sleep(10);
                    } catch (Exception e) {
                        if (!toStop) {
                            logger.error(e.getMessage(), e);
                        }
                    }
                }
                logger.info(">>>>>>>>>>> service-data-dts, job fail monitor thread stop");
            }
        });
        monitorThread.setDaemon(true);
        monitorThread.setName("service-data-dts, admin JobFailMonitorHelper");
        monitorThread.start();
    }

    /** Signal the monitor thread to stop, interrupt its sleep, and wait for it to exit. */
    public void toStop(){
        toStop = true;
        // interrupt and wait
        monitorThread.interrupt();
        try {
            monitorThread.join();
        } catch (InterruptedException e) {
            logger.error(e.getMessage(), e);
        }
    }

    // ---------------------- alarm ----------------------

    // email alarm template, filled via MessageFormat:
    // {0}=group title, {1}=job id, {2}=job description, {3}=alarm content
    private static final String mailBodyTemplate = "<h5>" + I18nUtil.getString("jobconf_monitor_detail") + "</span>" +
            "<table border=\"1\" cellpadding=\"3\" style=\"border-collapse:collapse; width:80%;\" >\n" +
            "   <thead style=\"font-weight: bold;color: #ffffff;background-color: #ff8c00;\" >" +
            "      <tr>\n" +
            "         <td width=\"20%\" >"+ I18nUtil.getString("jobinfo_field_jobgroup") +"</td>\n" +
            "         <td width=\"10%\" >"+ I18nUtil.getString("jobinfo_field_id") +"</td>\n" +
            "         <td width=\"20%\" >"+ I18nUtil.getString("jobinfo_field_jobdesc") +"</td>\n" +
            "         <td width=\"10%\" >"+ I18nUtil.getString("jobconf_monitor_alarm_title") +"</td>\n" +
            "         <td width=\"40%\" >"+ I18nUtil.getString("jobconf_monitor_alarm_content") +"</td>\n" +
            "      </tr>\n" +
            "   </thead>\n" +
            "   <tbody>\n" +
            "      <tr>\n" +
            "         <td>{0}</td>\n" +
            "         <td>{1}</td>\n" +
            "         <td>{2}</td>\n" +
            "         <td>"+ I18nUtil.getString("jobconf_monitor_alarm_type") +"</td>\n" +
            "         <td>{3}</td>\n" +
            "      </tr>\n" +
            "   </tbody>\n" +
            "</table>";

    /**
     * Send the fail-alarm email(s) for one job log to every address in the job's
     * comma-separated alarm-email list.
     *
     * @param info   the failing job; when null or without alarm emails nothing is sent
     * @param jobLog the failed execution record used to build the alarm content
     * @return true when all emails were sent (or none was required), false if any send failed
     */
    private boolean failAlarm(JobInfo info, JobLog jobLog){
        boolean alarmResult = true;

        // send monitor email
        if (info != null && info.getAlarmEmail() != null && info.getAlarmEmail().trim().length() > 0) {
            // alarmContent
            String alarmContent = "Alarm Job LogId=" + jobLog.getId();
            if (jobLog.getTriggerCode() != ReturnT.SUCCESS_CODE) {
                alarmContent += "<br>TriggerMsg=<br>" + jobLog.getTriggerMsg();
            }
            if (jobLog.getHandleCode() > 0 && jobLog.getHandleCode() != ReturnT.SUCCESS_CODE) {
                alarmContent += "<br>HandleCode=" + jobLog.getHandleMsg();
            }

            // email info
            JobGroup group = JobAdminConfig.getAdminConfig().getJobGroupMapper().load(Integer.valueOf(info.getJobGroup()));
            String personal = I18nUtil.getString("admin_name_full");
            String title = I18nUtil.getString("jobconf_monitor");
            String content = MessageFormat.format(mailBodyTemplate,
                    group != null ? group.getTitle() : "null",
                    info.getId(),
                    info.getJobDesc(),
                    alarmContent);

            // de-duplicate recipients before sending
            Set<String> emailSet = new HashSet<String>(Arrays.asList(info.getAlarmEmail().split(",")));
            for (String email : emailSet) {
                // make mail
                try {
                    MimeMessage mimeMessage = JobAdminConfig.getAdminConfig().getMailSender().createMimeMessage();
                    MimeMessageHelper helper = new MimeMessageHelper(mimeMessage, true);
                    helper.setFrom(JobAdminConfig.getAdminConfig().getEmailUserName(), personal);
                    helper.setTo(email);
                    helper.setSubject(title);
                    helper.setText(content, true);
                    JobAdminConfig.getAdminConfig().getMailSender().send(mimeMessage);
                } catch (Exception e) {
                    logger.error(">>>>>>>>>>> service-data-dts, job fail alarm email send error, JobLogId:{}", jobLog.getId(), e);
                    alarmResult = false;
                }
            }
        }

        // do something, custom alarm strategy, such as sms
        return alarmResult;
    }

}

View File

@@ -0,0 +1,151 @@
package com.platform.admin.core.thread;

import com.platform.admin.core.conf.JobAdminConfig;
import com.platform.admin.entity.JobLogReport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

/**
 * Job log report helper (singleton). Once a minute it refreshes the per-day
 * trigger-count report for today and the previous two days; once a day it
 * purges logs older than the configured retention window.
 */
public class JobLogReportHelper {
    private static Logger logger = LoggerFactory.getLogger(JobLogReportHelper.class);

    private static JobLogReportHelper instance = new JobLogReportHelper();
    public static JobLogReportHelper getInstance(){
        return instance;
    }

    private Thread logrThread;
    private volatile boolean toStop = false;

    /** Start the daemon report/clean thread; it loops until {@link #toStop()} is called. */
    public void start(){
        logrThread = new Thread(new Runnable() {
            @Override
            public void run() {
                // last clean log time (0 forces a clean pass on the first iteration)
                long lastCleanLogTime = 0;

                while (!toStop) {
                    // 1. log-report refresh: refresh log report for the last 3 days
                    try {
                        for (int i = 0; i < 3; i++) {
                            // compute [00:00:00.000, 23:59:59.999] bounds of day (today - i)
                            Calendar itemDay = Calendar.getInstance();
                            itemDay.add(Calendar.DAY_OF_MONTH, -i);
                            itemDay.set(Calendar.HOUR_OF_DAY, 0);
                            itemDay.set(Calendar.MINUTE, 0);
                            itemDay.set(Calendar.SECOND, 0);
                            itemDay.set(Calendar.MILLISECOND, 0);
                            Date todayFrom = itemDay.getTime();

                            itemDay.set(Calendar.HOUR_OF_DAY, 23);
                            itemDay.set(Calendar.MINUTE, 59);
                            itemDay.set(Calendar.SECOND, 59);
                            itemDay.set(Calendar.MILLISECOND, 999);
                            Date todayTo = itemDay.getTime();

                            // refresh log-report every minute
                            JobLogReport logReport = new JobLogReport();
                            logReport.setTriggerDay(todayFrom);
                            logReport.setRunningCount(0);
                            logReport.setSucCount(0);
                            logReport.setFailCount(0);

                            Map<String, Object> triggerCountMap = JobAdminConfig
                                    .getAdminConfig().getJobLogMapper().findLogReport(todayFrom, todayTo);
                            if (triggerCountMap != null && triggerCountMap.size() > 0) {
                                int triggerDayCount = triggerCountMap.containsKey("triggerDayCount") ? Integer.valueOf(String.valueOf(triggerCountMap.get("triggerDayCount"))) : 0;
                                int triggerDayCountRunning = triggerCountMap.containsKey("triggerDayCountRunning") ? Integer.valueOf(String.valueOf(triggerCountMap.get("triggerDayCountRunning"))) : 0;
                                int triggerDayCountSuc = triggerCountMap.containsKey("triggerDayCountSuc") ? Integer.valueOf(String.valueOf(triggerCountMap.get("triggerDayCountSuc"))) : 0;
                                // failures are whatever is neither running nor successful
                                int triggerDayCountFail = triggerDayCount - triggerDayCountRunning - triggerDayCountSuc;
                                logReport.setRunningCount(triggerDayCountRunning);
                                logReport.setSucCount(triggerDayCountSuc);
                                logReport.setFailCount(triggerDayCountFail);
                            }

                            // do refresh: update-or-insert
                            int ret = JobAdminConfig.getAdminConfig().getJobLogReportMapper().update(logReport);
                            if (ret < 1) {
                                JobAdminConfig.getAdminConfig().getJobLogReportMapper().save(logReport);
                            }
                        }
                    } catch (Exception e) {
                        if (!toStop) {
                            // FIX: pass the throwable directly so SLF4J logs the stack
                            // trace (a "{}" placeholder only prints e.toString()).
                            logger.error(">>>>>>>>>>> service-data-dts, job log report thread error:", e);
                        }
                    }

                    // 2. log-clean: only when retention is enabled, at most once a day
                    if (JobAdminConfig.getAdminConfig().getLogretentiondays() > 0
                            && System.currentTimeMillis() - lastCleanLogTime > 24*60*60*1000) {
                        // expire-time: midnight of (today - retention days)
                        Calendar expiredDay = Calendar.getInstance();
                        expiredDay.add(Calendar.DAY_OF_MONTH, -1 * JobAdminConfig.getAdminConfig().getLogretentiondays());
                        expiredDay.set(Calendar.HOUR_OF_DAY, 0);
                        expiredDay.set(Calendar.MINUTE, 0);
                        expiredDay.set(Calendar.SECOND, 0);
                        expiredDay.set(Calendar.MILLISECOND, 0);
                        Date clearBeforeTime = expiredDay.getTime();

                        // clean expired logs in batches of 1000 until none remain
                        List<Long> logIds = null;
                        do {
                            logIds = JobAdminConfig.getAdminConfig().getJobLogMapper().findClearLogIds(0, 0, clearBeforeTime, 0, 1000);
                            if (logIds != null && logIds.size() > 0) {
                                JobAdminConfig.getAdminConfig().getJobLogMapper().clearLog(logIds);
                            }
                        } while (logIds != null && logIds.size() > 0);

                        // update clean time
                        lastCleanLogTime = System.currentTimeMillis();
                    }

                    try {
                        TimeUnit.MINUTES.sleep(1);
                    } catch (Exception e) {
                        if (!toStop) {
                            logger.error(e.getMessage(), e);
                        }
                    }
                }
                logger.info(">>>>>>>>>>> service-data-dts, job log report thread stop");
            }
        });
        logrThread.setDaemon(true);
        logrThread.setName("service-data-dts, admin JobLogReportHelper");
        logrThread.start();
    }

    /** Signal the report thread to stop, interrupt its sleep, and wait for it to exit. */
    public void toStop(){
        toStop = true;
        // interrupt and wait
        logrThread.interrupt();
        try {
            logrThread.join();
        } catch (InterruptedException e) {
            logger.error(e.getMessage(), e);
        }
    }

}

View File

@@ -0,0 +1,103 @@
package com.platform.admin.core.thread;

import com.platform.admin.core.conf.JobAdminConfig;
import com.platform.admin.entity.JobGroup;
import com.platform.admin.entity.JobRegistry;
import com.platform.core.enums.RegistryConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;
import java.util.concurrent.TimeUnit;

/**
 * Registry monitor (singleton). On every beat it prunes dead registrations and
 * rebuilds the comma-separated executor address list of every job group whose
 * address type is "automatic" (0).
 */
public class JobRegistryMonitorHelper {
    private static Logger logger = LoggerFactory.getLogger(JobRegistryMonitorHelper.class);

    private static JobRegistryMonitorHelper instance = new JobRegistryMonitorHelper();
    public static JobRegistryMonitorHelper getInstance(){
        return instance;
    }

    private Thread registryThread;
    private volatile boolean toStop = false;

    /** Start the daemon registry-monitor thread; it wakes every BEAT_TIMEOUT seconds. */
    public void start(){
        registryThread = new Thread(() -> {
            while (!toStop) {
                try {
                    // only auto-registered groups (addressType == 0) are refreshed
                    List<JobGroup> groupList = JobAdminConfig.getAdminConfig().getJobGroupMapper().findByAddressType(0);
                    if (groupList != null && !groupList.isEmpty()) {
                        // remove dead address (admin/executor)
                        List<Integer> ids = JobAdminConfig.getAdminConfig().getJobRegistryMapper().findDead(RegistryConfig.DEAD_TIMEOUT, new Date());
                        if (ids != null && ids.size() > 0) {
                            JobAdminConfig.getAdminConfig().getJobRegistryMapper().removeDead(ids);
                        }

                        // collect live executor addresses: appName -> unique address list
                        HashMap<String, List<String>> appAddressMap = new HashMap<>();
                        List<JobRegistry> list = JobAdminConfig.getAdminConfig().getJobRegistryMapper().findAll(RegistryConfig.DEAD_TIMEOUT, new Date());
                        if (list != null) {
                            for (JobRegistry item : list) {
                                if (RegistryConfig.RegistType.EXECUTOR.name().equals(item.getRegistryGroup())) {
                                    String appName = item.getRegistryKey();
                                    List<String> registryList = appAddressMap.get(appName);
                                    if (registryList == null) {
                                        registryList = new ArrayList<>();
                                    }
                                    if (!registryList.contains(item.getRegistryValue())) {
                                        registryList.add(item.getRegistryValue());
                                    }
                                    appAddressMap.put(appName, registryList);
                                }
                            }
                        }

                        // fresh group address: sorted, comma separated; null when none alive
                        for (JobGroup group : groupList) {
                            List<String> registryList = appAddressMap.get(group.getAppName());
                            String addressListStr = null;
                            if (registryList != null && !registryList.isEmpty()) {
                                Collections.sort(registryList);
                                // FIX: join via the standard library instead of quadratic
                                // string concatenation plus manual trailing-comma trimming
                                addressListStr = String.join(",", registryList);
                            }
                            group.setAddressList(addressListStr);
                            JobAdminConfig.getAdminConfig().getJobGroupMapper().update(group);
                        }
                    }
                } catch (Exception e) {
                    if (!toStop) {
                        // FIX: pass the throwable directly so SLF4J logs the stack trace
                        // (a "{}" placeholder only prints e.toString()).
                        logger.error(">>>>>>>>>>> service-data-dts, job registry monitor thread error:", e);
                    }
                }
                try {
                    TimeUnit.SECONDS.sleep(RegistryConfig.BEAT_TIMEOUT);
                } catch (InterruptedException e) {
                    if (!toStop) {
                        logger.error(">>>>>>>>>>> service-data-dts, job registry monitor thread error:", e);
                    }
                }
            }
            logger.info(">>>>>>>>>>> service-data-dts, job registry monitor thread stop");
        });
        registryThread.setDaemon(true);
        registryThread.setName("service-data-dts, admin JobRegistryMonitorHelper");
        registryThread.start();
    }

    /** Signal the monitor thread to stop, interrupt its sleep, and wait for it to exit. */
    public void toStop(){
        toStop = true;
        // interrupt and wait
        registryThread.interrupt();
        try {
            registryThread.join();
        } catch (InterruptedException e) {
            logger.error(e.getMessage(), e);
        }
    }

}

View File

@@ -0,0 +1,349 @@
package com.platform.admin.core.thread;
import com.platform.admin.core.conf.JobAdminConfig;
import com.platform.admin.core.cron.CronExpression;
import com.platform.admin.core.trigger.TriggerTypeEnum;
import com.platform.admin.entity.JobInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
/**
 * Core schedule loop (singleton). A "schedule" thread pre-reads jobs due within
 * the next PRE_READ_MS under a cluster-wide DB row lock (select ... for update
 * on job_lock) and either fires them immediately or places them on a 60-slot
 * second-indexed time ring; a second "ring" thread fires ring entries on their
 * second. The statement order inside the transaction is load-bearing: the row
 * lock must be taken before the pre-read and released (commit) only after the
 * trigger info has been written back.
 */
public class JobScheduleHelper {
private static Logger logger = LoggerFactory.getLogger(JobScheduleHelper.class);
private static JobScheduleHelper instance = new JobScheduleHelper();
public static JobScheduleHelper getInstance() {
return instance;
}
public static final long PRE_READ_MS = 5000; // pre read
private Thread scheduleThread;
private Thread ringThread;
private volatile boolean scheduleThreadToStop = false;
private volatile boolean ringThreadToStop = false;
// time ring: second-of-minute (0..59) -> job ids to fire at that second.
// NOTE(review): the per-slot lists are plain ArrayLists written by the
// schedule thread and drained by the ring thread; only the outer map is
// concurrent — confirm the remove()-based hand-off is a sufficient barrier.
private volatile static Map<Integer, List<Integer>> ringData = new ConcurrentHashMap<>();
/**
 * Start both daemon threads: the pre-reading schedule thread and the
 * second-aligned time-ring thread.
 */
public void start() {
// schedule thread
scheduleThread = new Thread(new Runnable() {
@Override
public void run() {
try {
TimeUnit.MILLISECONDS.sleep(5000 - System.currentTimeMillis() % 1000);
} catch (InterruptedException e) {
if (!scheduleThreadToStop) {
logger.error(e.getMessage(), e);
}
}
logger.info(">>>>>>>>> init service-data-dts admin scheduler success.");
// pre-read count: treadpool-size * trigger-qps (each trigger cost 50ms, qps = 1000/50 = 20)
int preReadCount = (JobAdminConfig.getAdminConfig().getTriggerPoolFastMax() + JobAdminConfig.getAdminConfig().getTriggerPoolSlowMax()) * 20;
while (!scheduleThreadToStop) {
// Scan Job
long start = System.currentTimeMillis();
Connection conn = null;
Boolean connAutoCommit = null;
PreparedStatement preparedStatement = null;
boolean preReadSuc = true;
try {
conn = JobAdminConfig.getAdminConfig().getDataSource().getConnection();
connAutoCommit = conn.getAutoCommit();
conn.setAutoCommit(false);
// cluster-wide mutex: only one admin node holds this row lock per scan
preparedStatement = conn.prepareStatement("select * from job_lock where lock_name = 'schedule_lock' for update");
preparedStatement.execute();
// tx start
// 1、pre read
long nowTime = System.currentTimeMillis();
List<JobInfo> scheduleList = JobAdminConfig.getAdminConfig().getJobInfoMapper().scheduleJobQuery(nowTime + PRE_READ_MS, preReadCount);
if (scheduleList != null && scheduleList.size() > 0) {
// 2、push time-ring
for (JobInfo jobInfo : scheduleList) {
// time-ring jump
if (nowTime > jobInfo.getTriggerNextTime() + PRE_READ_MS) {
// 2.1、trigger-expire > 5spass && make next-trigger-time
// misfire policy: skip the missed run, just advance the schedule
logger.warn(">>>>>>>>>>> service-data-dts, schedule misfire, jobId = " + jobInfo.getId());
// fresh next
refreshNextValidTime(jobInfo, new Date());
} else if (nowTime > jobInfo.getTriggerNextTime()) {
// 2.2、trigger-expire < 5sdirect-trigger && make next-trigger-time
// 1、trigger
JobTriggerPoolHelper.trigger(jobInfo.getId(), TriggerTypeEnum.CRON, -1, null, null);
logger.debug(">>>>>>>>>>> service-data-dts, schedule push trigger : jobId = " + jobInfo.getId());
// 2、fresh next
refreshNextValidTime(jobInfo, new Date());
// next-trigger-time in 5s, pre-read again
if (jobInfo.getTriggerStatus() == 1 && nowTime + PRE_READ_MS > jobInfo.getTriggerNextTime()) {
// 1、make ring second
int ringSecond = (int) ((jobInfo.getTriggerNextTime() / 1000) % 60);
// 2、push time ring
pushTimeRing(ringSecond, jobInfo.getId());
// 3、fresh next
refreshNextValidTime(jobInfo, new Date(jobInfo.getTriggerNextTime()));
}
} else {
// 2.3、trigger-pre-readtime-ring trigger && make next-trigger-time
// 1、make ring second
int ringSecond = (int) ((jobInfo.getTriggerNextTime() / 1000) % 60);
// 2、push time ring
pushTimeRing(ringSecond, jobInfo.getId());
// 3、fresh next
refreshNextValidTime(jobInfo, new Date(jobInfo.getTriggerNextTime()));
}
}
// 3、update trigger info
for (JobInfo jobInfo : scheduleList) {
JobAdminConfig.getAdminConfig().getJobInfoMapper().scheduleUpdate(jobInfo);
}
} else {
preReadSuc = false;
}
// tx stop
} catch (Exception e) {
if (!scheduleThreadToStop) {
logger.error(">>>>>>>>>>> service-data-dts, JobScheduleHelper#scheduleThread error:{}", e);
}
} finally {
// commit (also releases the schedule_lock row lock)
if (conn != null) {
try {
conn.commit();
} catch (SQLException e) {
if (!scheduleThreadToStop) {
logger.error(e.getMessage(), e);
}
}
try {
conn.setAutoCommit(connAutoCommit);
} catch (SQLException e) {
if (!scheduleThreadToStop) {
logger.error(e.getMessage(), e);
}
}
try {
conn.close();
} catch (SQLException e) {
if (!scheduleThreadToStop) {
logger.error(e.getMessage(), e);
}
}
}
// close PreparedStatement
if (null != preparedStatement) {
try {
preparedStatement.close();
} catch (SQLException e) {
if (!scheduleThreadToStop) {
logger.error(e.getMessage(), e);
}
}
}
}
long cost = System.currentTimeMillis() - start;
// Wait seconds, align second
if (cost < 1000) { // scan-overtime, not wait
try {
// pre-read period: success > scan each second; fail > skip this period;
TimeUnit.MILLISECONDS.sleep((preReadSuc ? 1000 : PRE_READ_MS) - System.currentTimeMillis() % 1000);
} catch (InterruptedException e) {
if (!scheduleThreadToStop) {
logger.error(e.getMessage(), e);
}
}
}
}
logger.info(">>>>>>>>>>> service-data-dts, JobScheduleHelper#scheduleThread stop");
}
});
scheduleThread.setDaemon(true);
scheduleThread.setName("service-data-dts, admin JobScheduleHelper#scheduleThread");
scheduleThread.start();
// ring thread: fires ring entries for the current second (and the previous
// one, to cover a tick skipped by slow processing)
ringThread = new Thread(() -> {
// align second
try {
TimeUnit.MILLISECONDS.sleep(1000 - System.currentTimeMillis() % 1000);
} catch (InterruptedException e) {
if (!ringThreadToStop) {
logger.error(e.getMessage(), e);
}
}
while (!ringThreadToStop) {
try {
// second data
List<Integer> ringItemData = new ArrayList<>();
int nowSecond = Calendar.getInstance().get(Calendar.SECOND); // check the previous slot too, in case a tick was skipped
for (int i = 0; i < 2; i++) {
List<Integer> tmpData = ringData.remove((nowSecond + 60 - i) % 60);
if (tmpData != null) {
ringItemData.addAll(tmpData);
}
}
// ring trigger
logger.debug(">>>>>>>>>>> service-data-dts, time-ring beat : " + nowSecond + " = " + Arrays.asList(ringItemData));
if (ringItemData.size() > 0) {
// do trigger
for (int jobId : ringItemData) {
// do trigger
JobTriggerPoolHelper.trigger(jobId, TriggerTypeEnum.CRON, -1, null, null);
}
// clear
ringItemData.clear();
}
} catch (Exception e) {
if (!ringThreadToStop) {
logger.error(">>>>>>>>>>> service-data-dts, JobScheduleHelper#ringThread error:{}", e);
}
}
// next second, align second
try {
TimeUnit.MILLISECONDS.sleep(1000 - System.currentTimeMillis() % 1000);
} catch (InterruptedException e) {
if (!ringThreadToStop) {
logger.error(e.getMessage(), e);
}
}
}
logger.info(">>>>>>>>>>> service-data-dts, JobScheduleHelper#ringThread stop");
});
ringThread.setDaemon(true);
ringThread.setName("service-data-dts, admin JobScheduleHelper#ringThread");
ringThread.start();
}
/**
 * Advance a job's schedule: last-time takes the old next-time and next-time
 * becomes the next cron fire time after fromTime. When the cron yields no
 * further fire time the job is switched off (triggerStatus=0, times zeroed).
 *
 * @throws ParseException if the job's cron expression is invalid
 */
private void refreshNextValidTime(JobInfo jobInfo, Date fromTime) throws ParseException {
Date nextValidTime = new CronExpression(jobInfo.getJobCron()).getNextValidTimeAfter(fromTime);
if (nextValidTime != null) {
jobInfo.setTriggerLastTime(jobInfo.getTriggerNextTime());
jobInfo.setTriggerNextTime(nextValidTime.getTime());
} else {
jobInfo.setTriggerStatus(0);
jobInfo.setTriggerLastTime(0);
jobInfo.setTriggerNextTime(0);
}
}
// Append a job id to the ring slot for the given second (0..59), creating the
// slot list on first use.
private void pushTimeRing(int ringSecond, int jobId) {
// push async ring
List<Integer> ringItemData = ringData.get(ringSecond);
if (ringItemData == null) {
ringItemData = new ArrayList<Integer>();
ringData.put(ringSecond, ringItemData);
}
ringItemData.add(jobId);
logger.debug(">>>>>>>>>>> service-data-dts, schedule push time-ring : " + ringSecond + " = " + Arrays.asList(ringItemData));
}
/**
 * Orderly shutdown: stop the schedule thread first so no new ring entries are
 * produced, wait up to ~8s for any remaining ring entries to fire, then stop
 * the ring thread. Sleeps give each thread a grace period before interrupting.
 */
public void toStop() {
// 1、stop schedule
scheduleThreadToStop = true;
try {
TimeUnit.SECONDS.sleep(1); // wait
} catch (InterruptedException e) {
logger.error(e.getMessage(), e);
}
if (scheduleThread.getState() != Thread.State.TERMINATED) {
// interrupt and wait
scheduleThread.interrupt();
try {
scheduleThread.join();
} catch (InterruptedException e) {
logger.error(e.getMessage(), e);
}
}
// if has ring data
boolean hasRingData = false;
if (!ringData.isEmpty()) {
for (int second : ringData.keySet()) {
List<Integer> tmpData = ringData.get(second);
if (tmpData != null && tmpData.size() > 0) {
hasRingData = true;
break;
}
}
}
if (hasRingData) {
try {
TimeUnit.SECONDS.sleep(8);
} catch (InterruptedException e) {
logger.error(e.getMessage(), e);
}
}
// stop ring (wait job-in-memory stop)
ringThreadToStop = true;
try {
TimeUnit.SECONDS.sleep(1);
} catch (InterruptedException e) {
logger.error(e.getMessage(), e);
}
if (ringThread.getState() != Thread.State.TERMINATED) {
// interrupt and wait
ringThread.interrupt();
try {
ringThread.join();
} catch (InterruptedException e) {
logger.error(e.getMessage(), e);
}
}
logger.info(">>>>>>>>>>> service-data-dts, JobScheduleHelper stop");
}
}

View File

@@ -0,0 +1,274 @@
package com.platform.admin.core.thread;
import cn.hutool.core.io.FileUtil;
import cn.hutool.core.util.IdUtil;
import com.platform.admin.core.conf.ExcecutorConfig;
import com.platform.admin.core.conf.JobAdminConfig;
import com.platform.admin.core.trigger.JobTrigger;
import com.platform.admin.core.trigger.TriggerTypeEnum;
import com.platform.admin.entity.JobInfo;
import com.platform.admin.entity.JobLog;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.log.JobLogger;
import com.platform.core.util.Constants;
import com.platform.core.util.ProcessUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Job trigger thread pool helper.
 * Dispatches trigger requests onto a fast or a slow thread pool; a job that
 * exceeds the 500ms trigger threshold more than 10 times within the current
 * minute is demoted to the slow pool so it cannot starve fast jobs.
 * Also contains static helpers for launching FlinkX jobs as external OS
 * processes and recording their execution in the job log.
 */
public class JobTriggerPoolHelper {
    private static Logger logger = LoggerFactory.getLogger(JobTriggerPoolHelper.class);
    // ---------------------- trigger pool ----------------------
    // fast/slow thread pool
    private ThreadPoolExecutor fastTriggerPool = null;
    private ThreadPoolExecutor slowTriggerPool = null;
    /**
     * Initializes both trigger pools. Core size is 10 for each; maximum sizes
     * come from the admin config. The fast pool queues up to 1000 pending
     * triggers, the slow pool up to 2000.
     */
    public void start() {
        fastTriggerPool = new ThreadPoolExecutor(
                10,
                JobAdminConfig.getAdminConfig().getTriggerPoolFastMax(),
                60L,
                TimeUnit.SECONDS,
                new LinkedBlockingQueue<Runnable>(1000),
                new ThreadFactory() {
                    @Override
                    public Thread newThread(Runnable r) {
                        return new Thread(r, "service-data-dts, admin JobTriggerPoolHelper-fastTriggerPool-" + r.hashCode());
                    }
                });
        slowTriggerPool = new ThreadPoolExecutor(
                10,
                JobAdminConfig.getAdminConfig().getTriggerPoolSlowMax(),
                60L,
                TimeUnit.SECONDS,
                new LinkedBlockingQueue<Runnable>(2000),
                new ThreadFactory() {
                    @Override
                    public Thread newThread(Runnable r) {
                        return new Thread(r, "service-data-dts, admin JobTriggerPoolHelper-slowTriggerPool-" + r.hashCode());
                    }
                });
    }
    /**
     * Shuts down both pools immediately; queued but unstarted triggers are
     * discarded (shutdownNow, not shutdown).
     */
    public void stop() {
        //triggerPool.shutdown();
        fastTriggerPool.shutdownNow();
        slowTriggerPool.shutdownNow();
        logger.info(">>>>>>>>> service-data-dts trigger thread pool shutdown success.");
    }
    // job timeout count
    // minute bucket currently being counted; the timeout map resets when the minute rolls over
    private volatile long minTim = System.currentTimeMillis() / 60000; // ms > min
    // jobId -> number of slow (>500ms) triggers observed within the current minute
    private volatile ConcurrentMap<Integer, AtomicInteger> jobTimeoutCountMap = new ConcurrentHashMap<>();
    /**
     * add trigger
     *
     * Submits one trigger request asynchronously, routing it to the slow pool
     * when the job has timed out more than 10 times in the current minute.
     */
    public void addTrigger(final int jobId, final TriggerTypeEnum triggerType, final int failRetryCount, final String executorShardingParam, final String executorParam) {
        // choose thread pool
        ThreadPoolExecutor triggerPool_ = fastTriggerPool;
        AtomicInteger jobTimeoutCount = jobTimeoutCountMap.get(jobId);
        if (jobTimeoutCount != null && jobTimeoutCount.get() > 10) { // job-timeout 10 times in 1 min
            triggerPool_ = slowTriggerPool;
        }
        // trigger
        triggerPool_.execute(() -> {
            long start = System.currentTimeMillis();
            try {
                // do trigger
                JobTrigger.trigger(jobId, triggerType, failRetryCount, executorShardingParam, executorParam);
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
            } finally {
                // check timeout-count-map: reset the counters when a new minute starts
                long minTim_now = System.currentTimeMillis() / 60000;
                if (minTim != minTim_now) {
                    minTim = minTim_now;
                    jobTimeoutCountMap.clear();
                }
                // incr timeout-count-map
                long cost = System.currentTimeMillis() - start;
                if (cost > 500) { // ob-timeout threshold 500ms
                    AtomicInteger timeoutCount = jobTimeoutCountMap.putIfAbsent(jobId, new AtomicInteger(1));
                    if (timeoutCount != null) {
                        timeoutCount.incrementAndGet();
                    }
                }
            }
        });
    }
    // ---------------------- helper ----------------------
    // process-wide singleton backing the static facade below
    private static JobTriggerPoolHelper helper = new JobTriggerPoolHelper();
    public static void toStart() {
        helper.start();
    }
    public static void toStop() {
        helper.stop();
    }
    /**
     * @param jobId
     * @param triggerType
     * @param failRetryCount >=0: use this param
     * <0: use param from job info config
     * @param executorShardingParam
     * @param executorParam null: use job param
     * not null: cover job param
     */
    public static void trigger(int jobId, TriggerTypeEnum triggerType, int failRetryCount, String executorShardingParam, String executorParam) {
        helper.addTrigger(jobId, triggerType, failRetryCount, executorShardingParam, executorParam);
    }
    /**
     * Builds the command array for launching a FlinkX job: platform launcher
     * (cmd on Windows, sh on Linux), the FlinkX script path, then the job JSON
     * temp file. Also ensures the FlinkX log home directory exists.
     * NOTE(review): {@code timestamp} is unused since the .out redirection line
     * below was commented out.
     */
    public static String[] buildFlinkXExecutorCmd(String flinkXShPath, String tmpFilePath,int jobId) {
        long timestamp = System.currentTimeMillis();
        List<String> cmdArr = new ArrayList<>();
        if(JobTriggerPoolHelper.isWindows()) {
            cmdArr.add(Constants.CMDWINDOW);
            cmdArr.add(flinkXShPath);
            cmdArr.add(tmpFilePath);
        } else {
            cmdArr.add(Constants.CMDLINUX);
            cmdArr.add(flinkXShPath);
            cmdArr.add(tmpFilePath);
        }
        String logHome = ExcecutorConfig.getExcecutorConfig().getFlinkxlogHome();
        File folder = new File(logHome);
        if (!folder.exists() && !folder.isDirectory()) {
            folder.mkdirs();
        }
        // cmdArr.add(logHome+"/"+jobId+""+timestamp+".out");
        logger.info(cmdArr + " " + flinkXShPath + " " + tmpFilePath);
        return cmdArr.toArray(new String[cmdArr.size()]);
    }
    /** Returns true when the current OS name contains "windows". */
    public static boolean isWindows() {
        return System.getProperty("os.name").toLowerCase().contains("windows");
    }
    /**
     * Runs a FlinkX job synchronously: writes the job JSON to a temp file,
     * launches the external process, streams its stdout to a per-run log file,
     * waits for completion, then saves a JobLog row pointing at the log file.
     * NOTE(review): the command is flattened to a single String and passed to
     * {@code Runtime.exec(String)}, which tokenizes on whitespace — the
     * " > file" branch is passed as literal arguments, not a shell
     * redirection, and paths containing spaces would break. Consider
     * ProcessBuilder with the argument list. TODO confirm intended behavior.
     */
    public static void runJob(int jobId) {
        InputStreamReader isReader = null;
        BufferedReader bfReader = null;
        FileOutputStream out = null;
        try {
            JobInfo jobInfo = JobAdminConfig.getAdminConfig().getJobInfoMapper().loadById(jobId);
            String cmdstr = "";
            String tmpFilePath ="";
            String[] cmdarrayFinal = null;
            tmpFilePath = generateTemJsonFile(jobInfo.getJobJson());
            cmdarrayFinal = buildFlinkXExecutorCmd(ExcecutorConfig.getExcecutorConfig().getFlinkxHome(), tmpFilePath, jobId);
            // flatten the argv into one command string (see NOTE(review) above)
            for (int j = 0; j < cmdarrayFinal.length; j++) {
                if (cmdarrayFinal[j].contains(".log")) {
                    cmdstr += " > " + cmdarrayFinal[j] ;
                }else {
                    cmdstr += cmdarrayFinal[j] + " ";
                }
            }
            // if a python interpreter appears mid-command, strip everything before it
            if(cmdstr.indexOf("python")>0){
                cmdstr = cmdstr.substring(cmdstr.indexOf("python"), cmdstr.length());
            }
            final Process process = Runtime.getRuntime().exec(cmdstr);
            String prcsId = ProcessUtil.getProcessId(process);
            JobLogger.log("Execute: " + cmdstr);
            JobLogger.log("process id: " + prcsId);
            // read stdout directly so direct execution takes effect (original note by jeff)
            isReader = new InputStreamReader(process.getInputStream(), "UTF-8");
            bfReader = new BufferedReader(isReader);
            String line = null;
            String logPath = ExcecutorConfig.getExcecutorConfig().getFlinkxlogHome()+"/"+jobId+""+System.currentTimeMillis()+".log";
            JobLogger.log("logPath: " + logPath);
            out = new FileOutputStream(logPath);
            // copy process stdout line-by-line into the run log file
            while ((line = bfReader.readLine()) != null){
                logger.info(line);
                out.write(line.getBytes());
                String newLine = System.getProperty("line.separator");
                out.write(newLine.getBytes());
            }
            process.waitFor();
            // NOTE(review): temp-file cleanup is disabled, so jobTmp-*.json files accumulate
            if (FileUtil.exist(tmpFilePath)) {
                // FileUtil.del(new File(tmpFilePath));
            }
            // record the run in the job log (trigger time truncated to whole seconds)
            Calendar calendar = Calendar.getInstance();
            calendar.setTime(new Date());
            calendar.set(Calendar.MILLISECOND, 0);
            Date triggerTime = calendar.getTime();
            JobLog jobLog = new JobLog();
            jobLog.setJobGroup(jobInfo.getJobGroup());
            jobLog.setJobId(jobInfo.getId());
            jobLog.setTriggerTime(triggerTime);
            jobLog.setJobDesc(jobInfo.getJobDesc());
            jobLog.setHandleTime(triggerTime);
            jobLog.setTriggerCode(ReturnT.SUCCESS_CODE);
            jobLog.setHandleCode(0);
            jobLog.setProcessId(prcsId);
            // store the run's log file path in the executor-address column
            jobLog.setExecutorAddress(logPath);
            JobAdminConfig.getAdminConfig().getJobLogMapper().save(jobLog);
        } catch (Exception e) {
            e.printStackTrace();
        }finally {
            if(out != null){
                try {
                    out.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            if(bfReader != null){
                try {
                    bfReader.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            if(isReader != null){
                try {
                    isReader.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }
    /**
     * Writes the job JSON to a uniquely-named temp file under the configured
     * FlinkX JSON directory and returns the file path.
     */
    private static String generateTemJsonFile(String jobJson) {
        String jsonPath = "";
        jsonPath = ExcecutorConfig.getExcecutorConfig().getFlinkxjsonPath();
        if (!FileUtil.exist(jsonPath)) {
            FileUtil.mkdir(jsonPath);
        }
        String tmpFilePath = jsonPath + "jobTmp-" + IdUtil.simpleUUID() + ".json";
        // write the job JSON to the temp file
        // NOTE(review): jsonPath is concatenated without a separator — assumes the configured path ends with "/"; TODO confirm
        try (PrintWriter writer = new PrintWriter(tmpFilePath, "UTF-8")) {
            writer.println(jobJson);
        } catch (FileNotFoundException | UnsupportedEncodingException e) {
            JobLogger.log("JSON 临时文件写入异常:" + e.getMessage());
        }
        return tmpFilePath;
    }
}

View File

@@ -0,0 +1,258 @@
package com.platform.admin.core.trigger;
import com.platform.rpc.util.IpUtil;
import com.platform.admin.core.conf.JobAdminConfig;
import com.platform.admin.core.route.ExecutorRouteStrategyEnum;
import com.platform.admin.core.scheduler.JobScheduler;
import com.platform.admin.core.util.I18nUtil;
import com.platform.admin.entity.JobDatasource;
import com.platform.admin.entity.JobGroup;
import com.platform.admin.entity.JobInfo;
import com.platform.admin.entity.JobLog;
import com.platform.admin.tool.query.BaseQueryTool;
import com.platform.admin.tool.query.QueryToolFactory;
import com.platform.admin.util.JSONUtils;
import com.platform.core.biz.ExecutorBiz;
import com.platform.core.biz.impl.ExecutorBizImpl;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.core.enums.ExecutorBlockStrategyEnum;
import com.platform.core.enums.IncrementTypeEnum;
import com.platform.core.glue.GlueTypeEnum;
import io.netty.util.internal.ThrowableUtil;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Calendar;
import java.util.Date;
/**
 * Core trigger logic: resolves the job, computes sharding, routes to an
 * executor address, invokes the executor, and persists trigger details into
 * the job log.
 */
public class JobTrigger {
    private static Logger logger = LoggerFactory.getLogger(JobTrigger.class);
    /**
     * trigger job
     *
     * @param jobId
     * @param triggerType
     * @param failRetryCount >=0: use this param
     * <0: use param from job info config
     * @param executorShardingParam
     * @param executorParam null: use job param
     * not null: cover job param
     */
    public static void trigger(int jobId, TriggerTypeEnum triggerType, int failRetryCount, String executorShardingParam, String executorParam) {
        JobInfo jobInfo = JobAdminConfig.getAdminConfig().getJobInfoMapper().loadById(jobId);
        if (jobInfo == null) {
            logger.warn(">>>>>>>>>>>> trigger fail, jobId invalidjobId={}", jobId);
            return;
        }
        if (GlueTypeEnum.BEAN.getDesc().equals(jobInfo.getGlueType())) {
            // decrypt the credentials embedded in the job JSON for BEAN-type jobs
            String json = JSONUtils.changeJson(jobInfo.getJobJson(), JSONUtils.decrypt);
            jobInfo.setJobJson(json);
        }
        // explicit executor param overrides the one stored on the job
        if (StringUtils.isNotBlank(executorParam)) {
            jobInfo.setExecutorParam(executorParam);
        }
        int finalFailRetryCount = failRetryCount >= 0 ? failRetryCount : jobInfo.getExecutorFailRetryCount();
        JobGroup group = JobAdminConfig.getAdminConfig().getJobGroupMapper().load(jobInfo.getJobGroup());
        // sharding param: expected format "index/total"
        int[] shardingParam = null;
        if (executorShardingParam != null) {
            String[] shardingArr = executorShardingParam.split("/");
            if (shardingArr.length == 2 && isNumeric(shardingArr[0]) && isNumeric(shardingArr[1])) {
                shardingParam = new int[2];
                shardingParam[0] = Integer.valueOf(shardingArr[0]);
                shardingParam[1] = Integer.valueOf(shardingArr[1]);
            }
        }
        // broadcast strategy without an explicit shard: fan out one trigger per registered executor
        if (ExecutorRouteStrategyEnum.SHARDING_BROADCAST == ExecutorRouteStrategyEnum.match(jobInfo.getExecutorRouteStrategy(), null)
                && group.getRegistryList() != null && !group.getRegistryList().isEmpty()
                && shardingParam == null) {
            logger.info("多任务processTrigger开始...");
            for (int i = 0; i < group.getRegistryList().size(); i++) {
                processTrigger(group, jobInfo, finalFailRetryCount, triggerType, i, group.getRegistryList().size());
            }
        } else {
            if (shardingParam == null) {
                shardingParam = new int[]{0, 1};
            }
            logger.info("单任务processTrigger开始...");
            processTrigger(group, jobInfo, finalFailRetryCount, triggerType, shardingParam[0], shardingParam[1]);
        }
    }
    /** Returns true when {@code str} parses as a (signed 32-bit) integer. */
    private static boolean isNumeric(String str) {
        try {
            int result = Integer.valueOf(str);
            return true;
        } catch (NumberFormatException e) {
            return false;
        }
    }
    /**
     * @param group job group, registry list may be empty
     * @param jobInfo
     * @param finalFailRetryCount
     * @param triggerType
     * @param index sharding index
     * @param total sharding index
     */
    private static void processTrigger(JobGroup group, JobInfo jobInfo, int finalFailRetryCount, TriggerTypeEnum triggerType, int index, int total) {
        TriggerParam triggerParam = new TriggerParam();
        // param
        ExecutorBlockStrategyEnum blockStrategy = ExecutorBlockStrategyEnum
                .match(jobInfo.getExecutorBlockStrategy(), ExecutorBlockStrategyEnum.SERIAL_EXECUTION); // block strategy
        ExecutorRouteStrategyEnum executorRouteStrategyEnum = ExecutorRouteStrategyEnum.match(jobInfo.getExecutorRouteStrategy(), null); // route strategy
        String shardingParam = (ExecutorRouteStrategyEnum.SHARDING_BROADCAST == executorRouteStrategyEnum) ? String.valueOf(index).concat("/").concat(String.valueOf(total)) : null;
        // 1、save log-id (trigger time truncated to whole seconds)
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(new Date());
        calendar.set(Calendar.MILLISECOND, 0);
        Date triggerTime = calendar.getTime();
        JobLog jobLog = new JobLog();
        jobLog.setJobGroup(jobInfo.getJobGroup());
        jobLog.setJobId(jobInfo.getId());
        jobLog.setTriggerTime(triggerTime);
        jobLog.setJobDesc(jobInfo.getJobDesc());
        long saveCount = JobAdminConfig.getAdminConfig().getJobLogMapper().save(jobLog);
        logger.info(">>>>>>>>>>> service-data-dts trigger start, jobId:{}", jobLog.getId());
        logger.info(">>>>>>>>>>> service-data-dts trigger start, saveCount:{}", saveCount);
        // 2、init trigger-param
        triggerParam.setJobId(jobInfo.getId());
        triggerParam.setExecutorHandler(jobInfo.getExecutorHandler());
        triggerParam.setExecutorParams(jobInfo.getExecutorParam());
        triggerParam.setExecutorBlockStrategy(jobInfo.getExecutorBlockStrategy());
        triggerParam.setExecutorTimeout(jobInfo.getExecutorTimeout());
        triggerParam.setLogId(jobLog.getId());
        triggerParam.setLogDateTime(jobLog.getTriggerTime().getTime());
        triggerParam.setGlueType(jobInfo.getGlueType());
        triggerParam.setGlueSource(jobInfo.getGlueSource());
        triggerParam.setGlueUpdatetime(jobInfo.getGlueUpdatetime().getTime());
        triggerParam.setBroadcastIndex(index);
        triggerParam.setBroadcastTotal(total);
        triggerParam.setJobJson(jobInfo.getJobJson());
        //increment parameter: ID / TIME / PARTITION incremental sync settings
        Integer incrementType = jobInfo.getIncrementType();
        if (incrementType != null) {
            triggerParam.setIncrementType(incrementType);
            if (IncrementTypeEnum.ID.getCode() == incrementType) {
                // ID-based increment: read rows between the stored start id and the current max id
                long maxId = getMaxId(jobInfo);
                jobLog.setMaxId(maxId);
                triggerParam.setEndId(maxId);
                triggerParam.setStartId(jobInfo.getIncStartId());
            } else if (IncrementTypeEnum.TIME.getCode() == incrementType) {
                triggerParam.setStartTime(jobInfo.getIncStartTime());
                triggerParam.setTriggerTime(triggerTime);
                triggerParam.setReplaceParamType(jobInfo.getReplaceParamType());
            } else if (IncrementTypeEnum.PARTITION.getCode() == incrementType) {
                triggerParam.setPartitionInfo(jobInfo.getPartitionInfo());
            }
            triggerParam.setReplaceParam(jobInfo.getReplaceParam());
        }
        //jvm parameter
        triggerParam.setJvmParam(jobInfo.getJvmParam());
        // 3、init address: pick an executor from the group's registry list
        String address = null;
        ReturnT<String> routeAddressResult = null;
        if (group.getRegistryList() != null && !group.getRegistryList().isEmpty()) {
            if (ExecutorRouteStrategyEnum.SHARDING_BROADCAST == executorRouteStrategyEnum) {
                // broadcast: shard index maps directly onto the registry list
                if (index < group.getRegistryList().size()) {
                    address = group.getRegistryList().get(index);
                } else {
                    address = group.getRegistryList().get(0);
                }
            } else {
                routeAddressResult = executorRouteStrategyEnum.getRouter().route(triggerParam, group.getRegistryList());
                if (routeAddressResult.getCode() == ReturnT.SUCCESS_CODE) {
                    address = routeAddressResult.getContent();
                }
            }
        } else {
            routeAddressResult = new ReturnT<String>(ReturnT.FAIL_CODE, I18nUtil.getString("jobconf_trigger_address_empty"));
        }
        // 4、trigger remote executor
        ReturnT<String> triggerResult = null;
        if (address != null) {
            triggerResult = runExecutor(triggerParam, address);
        } else {
            triggerResult = new ReturnT<String>(ReturnT.FAIL_CODE, null);
        }
        // 5、collection trigger info
        // NOTE(review): the empty append("") calls look like separator characters lost in an encoding pass — TODO confirm against i18n resources
        StringBuffer triggerMsgSb = new StringBuffer();
        triggerMsgSb.append(I18nUtil.getString("jobconf_trigger_type")).append("").append(triggerType.getTitle());
        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobconf_trigger_admin_adress")).append("").append(
                IpUtil.getIp());
        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobconf_trigger_exe_regtype")).append("")
                .append((group.getAddressType() == 0) ? I18nUtil.getString("jobgroup_field_addressType_0") : I18nUtil.getString("jobgroup_field_addressType_1"));
        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobconf_trigger_exe_regaddress")).append("").append(group.getRegistryList());
        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_executorRouteStrategy")).append("").append(executorRouteStrategyEnum.getTitle());
        if (shardingParam != null) {
            triggerMsgSb.append("(" + shardingParam + ")");
        }
        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_executorBlockStrategy")).append("").append(blockStrategy.getTitle());
        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_timeout")).append("").append(jobInfo.getExecutorTimeout());
        triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_executorFailRetryCount")).append("").append(finalFailRetryCount);
        triggerMsgSb.append("<br><br><span style=\"color:#00c0ef;\" > >>>>>>>>>>>" + I18nUtil.getString("jobconf_trigger_run") + "<<<<<<<<<<< </span><br>")
                .append((routeAddressResult != null && routeAddressResult.getMsg() != null) ? routeAddressResult.getMsg() + "<br><br>" : "").append((triggerResult != null && triggerResult.getMsg() != null) ? triggerResult.getMsg() : "");
        // 6、save log trigger-info
        jobLog.setExecutorAddress(address);
        jobLog.setExecutorHandler(jobInfo.getExecutorHandler());
        jobLog.setExecutorParam(jobInfo.getExecutorParam());
        jobLog.setExecutorShardingParam(shardingParam);
        jobLog.setExecutorFailRetryCount(finalFailRetryCount);
        jobLog.setTriggerCode(triggerResult.getCode());
        jobLog.setTriggerMsg(triggerMsgSb.toString());
        int uodateCount = JobAdminConfig.getAdminConfig().getJobLogMapper().updateTriggerInfo(jobLog);
        logger.info(">>>>>>>>>>> service-data-dts trigger end, jobId:{}", jobLog.getId());
        logger.info(">>>>>>>>>>> service-data-dts trigger end, uodateCount:{}", uodateCount);
    }
    /**
     * Queries the reader table for the current max value of the job's primary
     * key, used as the end id for ID-based incremental sync.
     */
    private static long getMaxId(JobInfo jobInfo) {
        JobDatasource datasource = JobAdminConfig.getAdminConfig().getJobDatasourceMapper().selectById(jobInfo.getDatasourceId());
        BaseQueryTool qTool = QueryToolFactory.getByDbType(datasource);
        return qTool.getMaxIdVal(jobInfo.getReaderTable(), jobInfo.getPrimaryKey());
    }
    /**
     * run executor
     *
     * @param triggerParam
     * @param address
     * @return the executor's result; on exception, FAIL with the stack trace as message
     */
    public static ReturnT<String> runExecutor(TriggerParam triggerParam, String address) {
        ReturnT<String> runResult = null;
        try {
            // invoke the executor to run the task
            ExecutorBiz executorBiz = new ExecutorBizImpl();
            runResult = executorBiz.run(triggerParam);
        } catch (Exception e) {
            logger.error(">>>>>>>>>>> service-data-dts trigger error, please check if the executor[{}] is running.", address, e);
            runResult = new ReturnT<String>(ReturnT.FAIL_CODE, ThrowableUtil.stackTraceToString(e));
        }
        StringBuffer runResultSB = new StringBuffer(I18nUtil.getString("jobconf_trigger_run") + "");
        runResultSB.append("<br>address").append(address);
        runResultSB.append("<br>code").append(runResult.getCode());
        runResultSB.append("<br>msg").append(runResult.getMsg());
        runResult.setMsg(runResultSB.toString());
        return runResult;
    }
}

View File

@@ -0,0 +1,22 @@
package com.platform.admin.core.trigger;
import com.platform.admin.core.util.I18nUtil;
/**
 * Origins from which a job trigger can be fired: manual action, cron
 * schedule, failure retry, parent-job chaining, or external API call.
 * Titles are resolved from the i18n resource bundle at class-load time.
 */
public enum TriggerTypeEnum {
    MANUAL(I18nUtil.getString("jobconf_trigger_type_manual")),
    CRON(I18nUtil.getString("jobconf_trigger_type_cron")),
    RETRY(I18nUtil.getString("jobconf_trigger_type_retry")),
    PARENT(I18nUtil.getString("jobconf_trigger_type_parent")),
    API(I18nUtil.getString("jobconf_trigger_type_api"));

    /** Localized, human-readable title of this trigger type. */
    private final String title;

    TriggerTypeEnum(String title) {
        this.title = title;
    }

    /** @return the localized title shown in the UI and trigger logs */
    public String getTitle() {
        return title;
    }
}

View File

@@ -0,0 +1,75 @@
package com.platform.admin.core.util;
import com.platform.admin.core.conf.JobAdminConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.EncodedResource;
import org.springframework.core.io.support.PropertiesLoaderUtils;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
 * Lazily loads the i18n resource bundle configured via the admin config and
 * exposes key lookups. The properties file is loaded once and cached.
 * Thread-safe: the cache uses volatile + double-checked locking so concurrent
 * first calls cannot load the bundle twice or observe a partially published
 * Properties object.
 */
public class I18nUtil {
    private static Logger logger = LoggerFactory.getLogger(I18nUtil.class);

    // cached bundle; volatile for safe publication across threads
    private static volatile Properties prop = null;

    /**
     * Loads (once) and returns the i18n properties for the configured locale,
     * e.g. "i18n/message_zh_CN.properties"; plain "i18n/message.properties"
     * when no locale is configured.
     *
     * @return the loaded properties, or null if loading failed
     */
    public static Properties loadI18nProp() {
        Properties loaded = prop;
        if (loaded != null) {
            return loaded;
        }
        synchronized (I18nUtil.class) {
            if (prop != null) {
                return prop;
            }
            try {
                // build i18n prop file name from the configured locale suffix
                String i18n = JobAdminConfig.getAdminConfig().getI18n();
                i18n = (i18n != null && i18n.trim().length() > 0) ? ("_" + i18n) : i18n;
                String i18nFile = MessageFormat.format("i18n/message{0}.properties", i18n);
                // load prop from the classpath as UTF-8
                Resource resource = new ClassPathResource(i18nFile);
                EncodedResource encodedResource = new EncodedResource(resource, "UTF-8");
                prop = PropertiesLoaderUtils.loadProperties(encodedResource);
            } catch (IOException e) {
                logger.error(e.getMessage(), e);
            }
            return prop;
        }
    }

    /**
     * get val of i18n key
     *
     * @param key message key
     * @return the localized value, or null when the key is absent or the bundle failed to load
     */
    public static String getString(String key) {
        Properties loaded = loadI18nProp();
        return (loaded != null) ? loaded.getProperty(key) : null;
    }

    /**
     * get mult val of i18n mult key, as json
     *
     * @param keys keys to include; when empty, every key in the bundle is included
     * @return a JSON object string of key -> value, or null when the bundle failed to load
     */
    public static String getMultString(String... keys) {
        Properties loaded = loadI18nProp();
        if (loaded == null) {
            return null;
        }
        Map<String, String> map = new HashMap<String, String>();
        if (keys != null && keys.length > 0) {
            for (String key : keys) {
                map.put(key, loaded.getProperty(key));
            }
        } else {
            for (String key : loaded.stringPropertyNames()) {
                map.put(key, loaded.getProperty(key));
            }
        }
        String json = JacksonUtil.writeValueAsString(map);
        return json;
    }
}

View File

@@ -0,0 +1,85 @@
package com.platform.admin.core.util;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
/**
 * Thin Jackson wrapper around a shared ObjectMapper. Serialization /
 * deserialization failures are logged and reported as null rather than
 * thrown, so callers must null-check results.
 * Note: JsonGenerationException, JsonParseException and JsonMappingException
 * all extend IOException, so a single IOException catch covers every Jackson
 * failure mode with identical handling (the previous per-type catches were
 * redundant).
 */
public class JacksonUtil {
    private static Logger logger = LoggerFactory.getLogger(JacksonUtil.class);

    // shared, thread-safe mapper instance
    private final static ObjectMapper objectMapper = new ObjectMapper();

    public static ObjectMapper getInstance() {
        return objectMapper;
    }

    /**
     * bean、array、List、Map --> json
     *
     * @param obj value to serialize
     * @return json string, or null if serialization failed
     */
    public static String writeValueAsString(Object obj) {
        try {
            return getInstance().writeValueAsString(obj);
        } catch (IOException e) {
            // covers JsonGenerationException / JsonMappingException as well
            logger.error(e.getMessage(), e);
        }
        return null;
    }

    /**
     * string --> bean、Map、List(array)
     *
     * @param jsonStr json input
     * @param clazz   target type
     * @return parsed object, or null if parsing failed
     */
    public static <T> T readValue(String jsonStr, Class<T> clazz) {
        try {
            return getInstance().readValue(jsonStr, clazz);
        } catch (IOException e) {
            // covers JsonParseException / JsonMappingException as well
            logger.error(e.getMessage(), e);
        }
        return null;
    }

    /**
     * string --> List&lt;Bean&gt;... (generic container types)
     *
     * @param jsonStr          json input
     * @param parametrized     container type, e.g. List.class
     * @param parameterClasses element type(s)
     * @return parsed object, or null if parsing failed
     */
    public static <T> T readValue(String jsonStr, Class<?> parametrized, Class<?>... parameterClasses) {
        try {
            JavaType javaType = getInstance().getTypeFactory().constructParametricType(parametrized, parameterClasses);
            return getInstance().readValue(jsonStr, javaType);
        } catch (IOException e) {
            // covers JsonParseException / JsonMappingException as well
            logger.error(e.getMessage(), e);
        }
        return null;
    }
}

View File

@@ -0,0 +1,129 @@
package com.platform.admin.core.util;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
 * Minimal in-memory TTL cache backed by a ConcurrentHashMap. Entries expire
 * lazily: expired entries are evicted on {@link #get} and swept on every
 * {@link #set}.
 */
public class LocalCacheUtil {
    // key -> entry; ConcurrentMap so concurrent readers/writers are safe
    private static ConcurrentMap<String, LocalCacheData> cacheRepository = new ConcurrentHashMap<String, LocalCacheData>();

    /** Internal cache entry: value plus its absolute expiry timestamp (epoch ms). */
    private static class LocalCacheData {
        private String key;
        private Object val;
        private long timeoutTime;

        public LocalCacheData() {
        }

        public LocalCacheData(String key, Object val, long timeoutTime) {
            this.key = key;
            this.val = val;
            this.timeoutTime = timeoutTime;
        }

        public String getKey() {
            return key;
        }

        public void setKey(String key) {
            this.key = key;
        }

        public Object getVal() {
            return val;
        }

        public void setVal(Object val) {
            this.val = val;
        }

        public long getTimeoutTime() {
            return timeoutTime;
        }

        public void setTimeoutTime(long timeoutTime) {
            this.timeoutTime = timeoutTime;
        }
    }

    /**
     * set cache
     *
     * @param key       cache key; blank keys are rejected
     * @param val       value; null removes the key instead of caching
     * @param cacheTime time-to-live in milliseconds; must be positive
     * @return true if the value was cached
     */
    public static boolean set(String key, Object val, long cacheTime) {
        // clean timeout cache, before set new cache (avoid cache too much)
        cleanTimeoutCache();
        // set new cache
        if (key == null || key.trim().length() == 0) {
            return false;
        }
        if (val == null) {
            // BUGFIX: previously fell through and cached a null value entry;
            // a null value means "remove", not "store null"
            remove(key);
            return false;
        }
        if (cacheTime <= 0) {
            // BUGFIX: previously fell through and stored an already-expired
            // (or instantly-expiring) entry; non-positive TTL means "remove"
            remove(key);
            return false;
        }
        long timeoutTime = System.currentTimeMillis() + cacheTime;
        LocalCacheData localCacheData = new LocalCacheData(key, val, timeoutTime);
        cacheRepository.put(localCacheData.getKey(), localCacheData);
        return true;
    }

    /**
     * remove cache
     *
     * @param key cache key
     * @return true unless the key is blank (removal of an absent key still returns true)
     */
    public static boolean remove(String key) {
        if (key == null || key.trim().length() == 0) {
            return false;
        }
        cacheRepository.remove(key);
        return true;
    }

    /**
     * get cache
     *
     * @param key cache key
     * @return the cached value, or null if absent or expired (expired entries are evicted)
     */
    public static Object get(String key) {
        if (key == null || key.trim().length() == 0) {
            return null;
        }
        LocalCacheData localCacheData = cacheRepository.get(key);
        if (localCacheData != null && System.currentTimeMillis() < localCacheData.getTimeoutTime()) {
            return localCacheData.getVal();
        } else {
            remove(key);
            return null;
        }
    }

    /**
     * clean timeout cache
     *
     * @return always true; evicts every entry whose expiry has passed
     */
    public static boolean cleanTimeoutCache() {
        if (!cacheRepository.keySet().isEmpty()) {
            for (String key : cacheRepository.keySet()) {
                LocalCacheData localCacheData = cacheRepository.get(key);
                if (localCacheData != null && System.currentTimeMillis() >= localCacheData.getTimeoutTime()) {
                    cacheRepository.remove(key);
                }
            }
        }
        return true;
    }
}

View File

@@ -0,0 +1,15 @@
package com.platform.admin.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * ClickHouse reader DTO — placeholder; no ClickHouse-specific reader options
 * are defined yet.
 *
 * @author AllDataDC
 * @ClassName clickhouse reader dto
 * @date 2022/9/29
 */
@Data
public class ClickhouseReaderDto implements Serializable {
}

View File

@@ -0,0 +1,15 @@
package com.platform.admin.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * ClickHouse writer DTO — placeholder; no ClickHouse-specific writer options
 * are defined yet.
 *
 * @author AllDataDC
 * @ClassName clickhouse write dto
 * @date 2022/9/29
 */
@Data
public class ClickhouseWriterDto implements Serializable {
}

View File

@@ -0,0 +1,32 @@
package com.platform.admin.dto;
import lombok.Data;
import java.io.Serializable;
import java.util.List;
/**
 * Request body for building FlinkX JSON configs for a batch of tables
 * (relational reader/writer only).
 *
 * @author AllDataDC
 * @ClassName FlinkXJsonDto
 * @Version 2.1.2
 * @date 2022/05/05 17:15
 */
@Data
public class FlinkXBatchJsonBuildDto implements Serializable {
    // id of the source (reader) datasource
    private Long readerDatasourceId;
    // tables to read from
    private List<String> readerTables;
    // id of the target (writer) datasource
    private Long writerDatasourceId;
    // tables to write to
    private List<String> writerTables;
    // id of the JSON template to build from
    private int templateId;
    // relational reader options
    private RdbmsReaderDto rdbmsReader;
    // relational writer options
    private RdbmsWriterDto rdbmsWriter;
}

View File

@@ -0,0 +1,50 @@
package com.platform.admin.dto;
import lombok.Data;
import java.io.Serializable;
import java.util.List;
/**
 * Request body for building a single FlinkX JSON config; carries the
 * reader/writer selection plus the per-datasource option DTO matching the
 * chosen source and target types.
 *
 * @author AllDataDC
 * @ClassName FlinkxJsonDto
 * @Version 2.1.1
 * @date 2022/03/14 07:15
 */
@Data
public class FlinkXJsonBuildDto implements Serializable {
    // id of the source (reader) datasource
    private Long readerDatasourceId;
    // tables to read from
    private List<String> readerTables;
    // columns to read
    private List<String> readerColumns;
    // id of the target (writer) datasource
    private Long writerDatasourceId;
    // tables to write to
    private List<String> writerTables;
    // columns to write
    private List<String> writerColumns;
    // per-datasource-type options; only the pair matching the selected types is used
    private HiveReaderDto hiveReader;
    private HiveWriterDto hiveWriter;
    private HbaseReaderDto hbaseReader;
    private HbaseWriterDto hbaseWriter;
    private RdbmsReaderDto rdbmsReader;
    private RdbmsWriterDto rdbmsWriter;
    private MongoDBReaderDto mongoDBReader;
    private MongoDBWriterDto mongoDBWriter;
    private ClickhouseReaderDto clickhouseReader;
    private ClickhouseWriterDto clickhouseWriter;
}

View File

@@ -0,0 +1,17 @@
package com.platform.admin.dto;
import lombok.Data;
import java.io.Serializable;
/** HBase reader options used when building FlinkX JSON. */
@Data
public class HbaseReaderDto implements Serializable {
    // max cell versions to read
    private String readerMaxVersion;
    // read mode
    private String readerMode;
    // rowkey range to scan
    private Range readerRange;
}

View File

@@ -0,0 +1,17 @@
package com.platform.admin.dto;
import lombok.Data;
import java.io.Serializable;
/** HBase writer options used when building FlinkX JSON. */
@Data
public class HbaseWriterDto implements Serializable {
    // how null values are handled on write
    private String writeNullMode;
    // write mode
    private String writerMode;
    // rowkey column spec
    private String writerRowkeyColumn;
    // version column spec
    private VersionColumn writerVersionColumn;
}

View File

@@ -0,0 +1,27 @@
package com.platform.admin.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * Hive (HDFS) reader options used when building FlinkX JSON.
 *
 * @author AllDataDC
 * @ClassName hive reader
 * @date 2022/01/11 17:15
 */
@Data
public class HiveReaderDto implements Serializable {
    // HDFS path to read
    private String readerPath;
    // HDFS defaultFS, e.g. hdfs://host:port — TODO confirm format against caller
    private String readerDefaultFS;
    // file format of the source files
    private String readerFileType;
    // field delimiter used in the source files
    private String readerFieldDelimiter;
    // whether to skip a header row
    private Boolean readerSkipHeader;
}

View File

@@ -0,0 +1,28 @@
package com.platform.admin.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * Hive (HDFS) writer options used when building FlinkX JSON.
 *
 * @author AllDataDC
 * @ClassName hive write dto
 * @date 2022/01/11 17:15
 */
@Data
public class HiveWriterDto implements Serializable {
    // HDFS defaultFS of the target cluster
    private String writerDefaultFS;
    // file format of the produced files
    private String writerFileType;
    // HDFS directory to write into
    private String writerPath;
    // base name of the produced files
    private String writerFileName;
    // write mode (e.g. append/overwrite — TODO confirm allowed values)
    private String writeMode;
    // field delimiter used in the produced files
    private String writeFieldDelimiter;
}

View File

@@ -0,0 +1,19 @@
package com.platform.admin.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * MongoDB reader DTO — placeholder; no MongoDB-specific reader options are
 * defined yet.
 *
 * @author AllDataDC
 * @ClassName mongodb reader
 * @Version 2.1.1
 * @date 2022/03/14 07:15
 */
@Data
public class MongoDBReaderDto implements Serializable {
}

View File

@@ -0,0 +1,20 @@
package com.platform.admin.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * MongoDB writer options used when building FlinkX JSON.
 *
 * @author AllDataDC
 * @ClassName mongodb write dto
 * @Version 2.1.1
 * @date 2022/03/14 07:15
 */
@Data
public class MongoDBWriterDto implements Serializable {
    // upsert behavior configuration
    private UpsertInfo upsertInfo;
}

View File

@@ -0,0 +1,15 @@
package com.platform.admin.dto;
import lombok.Data;
import java.io.Serializable;
/** HBase rowkey scan range for the reader configuration. */
@Data
public class Range implements Serializable {
    // first rowkey of the scan
    private String startRowkey;
    // last rowkey of the scan
    private String endRowkey;
    // whether rowkeys are binary-encoded
    private Boolean isBinaryRowkey;
}

View File

@@ -0,0 +1,23 @@
package com.platform.admin.dto;
import lombok.Data;
import java.io.Serializable;
import java.util.List;
/**
 * Relational (JDBC) reader options used when building FlinkX JSON.
 *
 * @author AllDataDC
 * @ClassName RdbmsReaderDto
 * @date 2022/01/11 17:15
 */
@Data
public class RdbmsReaderDto implements Serializable {
    // column used to split the read into parallel slices
    private String readerSplitPk;
    // WHERE-clause filter appended to the generated query
    private String whereParams;
    // explicit SQL overriding the generated table/column query
    private String querySql;
}

View File

@@ -0,0 +1,20 @@
package com.platform.admin.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * Relational (JDBC) writer options used when building FlinkX JSON.
 *
 * @author AllDataDC
 * @ClassName RdbmsWriteDto
 * @date 2022/01/11 17:15
 */
@Data
public class RdbmsWriterDto implements Serializable {
    // SQL executed before the write starts
    private String preSql;
    // SQL executed after the write finishes
    private String postSql;
}

View File

@@ -0,0 +1,86 @@
package com.platform.admin.dto;
/**
 * Schedule settings used to build a job's cron-style schedule from
 * day/time selections made in the UI.
 */
public class TaskScheduleDto {
    /**
     * Selected job type:
     * 1 -> daily
     * 2 -> monthly
     * 3 -> weekly
     */
    Integer jobType;
    /**
     * Which days of the week (for weekly jobs)
     */
    Integer[] dayOfWeeks;
    /**
     * Which days of the month (for monthly jobs)
     */
    Integer[] dayOfMonths;
    /**
     * Second of the scheduled time
     */
    Integer second;
    /**
     * Minute of the scheduled time
     */
    Integer minute;
    /**
     * Hour of the scheduled time
     */
    Integer hour;
    public Integer getJobType() {
        return jobType;
    }
    public void setJobType(Integer jobType) {
        this.jobType = jobType;
    }
    public Integer[] getDayOfWeeks() {
        return dayOfWeeks;
    }
    public void setDayOfWeeks(Integer[] dayOfWeeks) {
        this.dayOfWeeks = dayOfWeeks;
    }
    public Integer[] getDayOfMonths() {
        return dayOfMonths;
    }
    public void setDayOfMonths(Integer[] dayOfMonths) {
        this.dayOfMonths = dayOfMonths;
    }
    public Integer getSecond() {
        return second;
    }
    public void setSecond(Integer second) {
        this.second = second;
    }
    public Integer getMinute() {
        return minute;
    }
    public void setMinute(Integer minute) {
        this.minute = minute;
    }
    public Integer getHour() {
        return hour;
    }
    public void setHour(Integer hour) {
        this.hour = hour;
    }
}

View File

@@ -0,0 +1,19 @@
package com.platform.admin.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * Request body received when starting (triggering) a job.
 *
 * @author AllDataDC
 * @ClassName TriggerJobDto
 * @date 2022/12/01 16:12
 */
@Data
public class TriggerJobDto implements Serializable {
    // executor parameter overriding the one stored on the job
    private String executorParam;
    // id of the job to trigger
    private int jobId;
}

View File

@@ -0,0 +1,18 @@
package com.platform.admin.dto;
import lombok.Data;
/**
 * MongoDB upsert settings carried in the writer configuration.
 * Created by mac on 2020/3/16.
 */
@Data
public class UpsertInfo {
    /**
     * When true, records with the same upsertKey are updated instead of inserted.
     */
    private Boolean isUpsert;
    /**
     * upsertKey identifies the business primary key of each record; used for updates.
     */
    private String upsertKey;
}

View File

@@ -0,0 +1,11 @@
package com.platform.admin.dto;
import lombok.Data;
/** Version column settings for the HBase writer: a column index or a constant value. */
@Data
public class VersionColumn {
    // column index to take the version from
    private Integer index;
    // constant version value
    private String value;
}

View File

@@ -0,0 +1,18 @@
package com.platform.admin.entity;
/**
 * Shared URL / server-property constants.
 * All fields are now {@code final}: this class documents itself as a
 * constants holder, and mutable public statics invite accidental reassignment.
 *
 * @author AllDataDC
 * @date 2022/10/7 11:21
 **/
public class Common {

    /** Constants holder — not instantiable. */
    private Common() {
    }

    /** URL scheme prefix. */
    public static final String PREFEX = "http://";
    /** Loopback address used when no host is configured. */
    public static final String LOCALADDRESS = "127.0.0.1";
    /** Default HTTP port. */
    public static final String PORT = "8080";
    /** Property key for the server port. */
    public static final String SERVERPORT = "server.port";
    /** Default (empty) context path. */
    public static final String CONTEXTPATH = "";
    /** Property key for the server context path. */
    public static final String SERVERCONTEXTPATH = "server.contextPath";
    /** Path of the API documentation page. */
    public static final String DOCPATH = "/doc.html";
    /** Path of the index page. */
    public static final String IndexPATH = "/index.html";
}

View File

@@ -0,0 +1,143 @@
package com.platform.admin.entity;
import com.alibaba.fastjson.annotation.JSONField;
import com.baomidou.mybatisplus.annotation.*;
import com.baomidou.mybatisplus.extension.activerecord.Model;
import com.platform.admin.core.handler.AESEncryptHandler;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.io.Serializable;
import java.util.Date;
/**
 * JDBC datasource configuration entity (table {@code job_jdbc_datasource}).
 *
 * @author AllDataDC
 * @version v1.0
 * @date 2022-07-30
 */
@Data
@ApiModel
@TableName("job_jdbc_datasource")
public class JobDatasource extends Model<JobDatasource> {
    /**
     * Auto-increment primary key.
     */
    @TableId
    @ApiModelProperty(value = "自增主键")
    private Long id;
    /**
     * Datasource display name.
     */
    @ApiModelProperty(value = "数据源名称")
    private String datasourceName;
    /**
     * Datasource type identifier.
     */
    @ApiModelProperty(value = "数据源")
    private String datasource;
    /**
     * Datasource group.
     */
    @ApiModelProperty(value = "数据源分组")
    private String datasourceGroup;
    /**
     * Username, stored AES-encrypted via {@link AESEncryptHandler}.
     * NOTE: MyBatis Plus before 3.1.2 has no typeHandler attribute — requires >= 3.1.2.
     */
    @ApiModelProperty(value = "用户名")
    @TableField(typeHandler = AESEncryptHandler.class)
    private String jdbcUsername;
    /**
     * Password, stored AES-encrypted via {@link AESEncryptHandler}.
     */
    @TableField(typeHandler = AESEncryptHandler.class)
    @ApiModelProperty(value = "密码")
    private String jdbcPassword;
    /**
     * JDBC connection URL.
     */
    @ApiModelProperty(value = "jdbc url")
    private String jdbcUrl;
    /**
     * JDBC driver class name.
     */
    @ApiModelProperty(value = "jdbc驱动类")
    private String jdbcDriverClass;
    /**
     * Status: 0 deleted (logical delete via @TableLogic), 1 enabled, 2 disabled.
     */
    @TableLogic
    @ApiModelProperty(value = "状态0删除 1启用 2禁用")
    private Integer status;
    /**
     * Creator, auto-filled on insert.
     */
    @TableField(fill = FieldFill.INSERT)
    @ApiModelProperty(value = "创建人", hidden = true)
    private String createBy;
    /**
     * Creation time, auto-filled on insert; serialized as yyyy/MM/dd.
     */
    @TableField(fill = FieldFill.INSERT)
    @JSONField(format = "yyyy/MM/dd")
    @ApiModelProperty(value = "创建时间", hidden = true)
    private Date createDate;
    /**
     * Last updater, auto-filled on insert and update.
     */
    @TableField(fill = FieldFill.INSERT_UPDATE)
    @ApiModelProperty(value = "更新人", hidden = true)
    private String updateBy;
    /**
     * Last update time, auto-filled on insert and update; serialized as yyyy/MM/dd.
     */
    @TableField(fill = FieldFill.INSERT_UPDATE)
    @JSONField(format = "yyyy/MM/dd")
    @ApiModelProperty(value = "更新时间", hidden = true)
    private Date updateDate;
    /**
     * Free-form remarks.
     */
    @ApiModelProperty(value = "备注", hidden = true)
    private String comments;
    /**
     * ZooKeeper address (used by ZooKeeper-backed datasources).
     */
    @ApiModelProperty(value = "zookeeper地址", hidden = true)
    private String zkAdress;
    /**
     * Database name.
     */
    @ApiModelProperty(value = "数据库名", hidden = true)
    private String databaseName;
    /**
     * Primary-key accessor required by ActiveRecord {@link Model}.
     *
     * @return the primary key value
     */
    @Override
    protected Serializable pkVal() {
        return this.id;
    }
}

View File

@@ -0,0 +1,82 @@
package com.platform.admin.entity;
import io.swagger.annotations.ApiModelProperty;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Executor group: a named set of executor instances, addressed either by
 * auto-registration or by a manually entered address list.
 */
public class JobGroup {
    @ApiModelProperty("执行器Id")
    private int id;
    @ApiModelProperty("执行器AppName")
    private String appName;
    @ApiModelProperty("执行器名称")
    private String title;
    @ApiModelProperty("排序")
    private int order;
    @ApiModelProperty("执行器地址类型0=自动注册、1=手动录入")
    private int addressType;
    @ApiModelProperty("执行器地址列表,多地址逗号分隔(手动录入)")
    private String addressList;

    /** Executor addresses registered with the system (auto-registration). */
    private List<String> registryList;

    /**
     * Returns the effective address list: when a manual, comma-separated
     * address list is configured it is (re)parsed on every call and replaces
     * the auto-registered list.
     */
    public List<String> getRegistryList() {
        String manual = this.addressList;
        if (manual != null && !manual.trim().isEmpty()) {
            this.registryList = new ArrayList<>(Arrays.asList(manual.split(",")));
        }
        return this.registryList;
    }

    public int getId() {
        return this.id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getAppName() {
        return this.appName;
    }

    public void setAppName(String appName) {
        this.appName = appName;
    }

    public String getTitle() {
        return this.title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public int getOrder() {
        return this.order;
    }

    public void setOrder(int order) {
        this.order = order;
    }

    public int getAddressType() {
        return this.addressType;
    }

    public void setAddressType(int addressType) {
        this.addressType = addressType;
    }

    public String getAddressList() {
        return this.addressList;
    }

    public void setAddressList(String addressList) {
        this.addressList = addressList;
    }
}

View File

@@ -0,0 +1,121 @@
package com.platform.admin.entity;
import com.baomidou.mybatisplus.annotation.TableField;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.Date;
/**
 * Scheduled job definition: cron schedule, executor routing, retry policy,
 * glue/script source and incremental-sync parameters.
 */
@Data
public class JobInfo {
    @ApiModelProperty("主键ID")
    private int id;
    @ApiModelProperty("执行器主键ID")
    private int jobGroup;
    @ApiModelProperty("任务执行CRON表达式")
    private String jobCron;
    @ApiModelProperty("排序")
    private String jobDesc;
    // Creation timestamp of this job record.
    private Date addTime;
    // Last modification timestamp of this job record.
    private Date updateTime;
    @ApiModelProperty("修改用户")
    private Long userId;
    @ApiModelProperty("报警邮件")
    private String alarmEmail;
    @ApiModelProperty("执行器路由策略")
    private String executorRouteStrategy;
    @ApiModelProperty("执行器任务Handler名称")
    private String executorHandler;
    @ApiModelProperty("执行器,任务参数")
    private String executorParam;
    @ApiModelProperty("阻塞处理策略")
    private String executorBlockStrategy;
    @ApiModelProperty("任务执行超时时间,单位秒")
    private int executorTimeout;
    @ApiModelProperty("失败重试次数")
    private int executorFailRetryCount;
    @ApiModelProperty("GLUE类型\t#com.guoliang.flinkx.core.glue.GlueTypeEnum")
    private String glueType;
    @ApiModelProperty("GLUE源代码")
    private String glueSource;
    @ApiModelProperty("GLUE备注")
    private String glueRemark;
    @ApiModelProperty("GLUE更新时间")
    private Date glueUpdatetime;
    @ApiModelProperty("子任务ID")
    private String childJobId;
    @ApiModelProperty("调度状态0-停止1-运行")
    private int triggerStatus;
    @ApiModelProperty("上次调度时间")
    private long triggerLastTime;
    @ApiModelProperty("下次调度时间")
    private long triggerNextTime;
    @ApiModelProperty("flinkx运行json")
    private String jobJson;
    @ApiModelProperty("脚本动态参数")
    private String replaceParam;
    @ApiModelProperty("增量日期格式")
    private String replaceParamType;
    @ApiModelProperty("jvm参数")
    private String jvmParam;
    @ApiModelProperty("增量初始时间")
    private Date incStartTime;
    @ApiModelProperty("分区信息")
    private String partitionInfo;
    @ApiModelProperty("最近一次执行状态")
    private int lastHandleCode;
    @ApiModelProperty("所属项目Id")
    private int projectId;
    @ApiModelProperty("主键字段")
    private String primaryKey;
    @ApiModelProperty("增量初始id")
    private Long incStartId;
    @ApiModelProperty("增量方式")
    private int incrementType;
    @ApiModelProperty("flinkx的读表")
    private String readerTable;
    @ApiModelProperty("数据源id")
    private int datasourceId;
    // Joined display fields, not mapped to columns.
    @TableField(exist=false)
    private String projectName;
    @TableField(exist=false)
    private String userName;
}

View File

@@ -0,0 +1,61 @@
package com.platform.admin.entity;
import com.fasterxml.jackson.annotation.JsonFormat;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.Date;
/**
 * Execution log of a single job trigger: job identity, dispatch target,
 * trigger result, handler result and alarm state.
 */
@Data
public class JobLog {
    private long id;
    // job info
    @ApiModelProperty("执行器主键ID")
    private int jobGroup;
    @ApiModelProperty("任务主键ID")
    private int jobId;
    @ApiModelProperty("任务描述")
    private String jobDesc;
    // execute info
    @ApiModelProperty("执行器地址,本次执行的地址")
    private String executorAddress;
    @ApiModelProperty("执行器任务handler")
    private String executorHandler;
    @ApiModelProperty("执行器任务参数")
    private String executorParam;
    @ApiModelProperty("执行器任务分片参数,格式如 1/2")
    private String executorShardingParam;
    @ApiModelProperty("失败重试次数")
    private int executorFailRetryCount;
    // trigger info
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    @ApiModelProperty("调度-时间")
    private Date triggerTime;
    @ApiModelProperty("调度-结果")
    private int triggerCode;
    @ApiModelProperty("调度-日志")
    private String triggerMsg;
    // handle info
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    @ApiModelProperty("执行-时间")
    private Date handleTime;
    @ApiModelProperty("执行-状态")
    private int handleCode;
    @ApiModelProperty("执行-日志")
    private String handleMsg;
    // alarm info
    @ApiModelProperty("告警状态0-默认、1-无需告警、2-告警成功、3-告警失败")
    private int alarmStatus;
    @ApiModelProperty("FlinkX进程Id")
    private String processId;
    @ApiModelProperty("增量最大id")
    private Long maxId;
}

View File

@@ -0,0 +1,29 @@
package com.platform.admin.entity;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.Date;
/**
 * Versioned history entry of a job's GLUE (script) source code.
 */
@Data
public class JobLogGlue {
    private int id;
    @ApiModelProperty("任务主键ID")
    private int jobId;
    @ApiModelProperty("GLUE类型\t#com.xxl.job.core.glue.GlueTypeEnum")
    private String glueType;
    @ApiModelProperty("GLUE源代码")
    private String glueSource;
    @ApiModelProperty("GLUE备注")
    private String glueRemark;
    // Record creation / last-update timestamps.
    private Date addTime;
    private Date updateTime;
}

View File

@@ -0,0 +1,17 @@
package com.platform.admin.entity;
import lombok.Data;
import java.util.Date;
/**
 * Per-day aggregate of job run outcomes used by the dashboard report.
 */
@Data
public class JobLogReport {
    private int id;
    // Day the counters below refer to.
    private Date triggerDay;
    // Number of runs still in progress that day.
    private int runningCount;
    // Number of successful runs that day.
    private int sucCount;
    // Number of failed runs that day.
    private int failCount;
}

View File

@@ -0,0 +1,57 @@
package com.platform.admin.entity;
/**
 * A permission entry: name, description, guarded URL and parent node id.
 * NOTE(review): the field/accessor spelling "descritpion" is a historical typo
 * baked into the public getter/setter names; renaming would break callers.
 */
public class JobPermission {
    private int id;
    // Permission name.
    private String name;
    // Permission description (typo preserved, see class note).
    private String descritpion;
    // Authorized URL.
    private String url;
    // Parent node id.
    private int pid;
    public int getId() {
        return id;
    }
    public void setId(int id) {
        this.id = id;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getDescritpion() {
        return descritpion;
    }
    public void setDescritpion(String descritpion) {
        this.descritpion = descritpion;
    }
    public String getUrl() {
        return url;
    }
    public void setUrl(String url) {
        this.url = url;
    }
    public int getPid() {
        return pid;
    }
    public void setPid(int pid) {
        this.pid = pid;
    }
}

View File

@@ -0,0 +1,41 @@
package com.platform.admin.entity;
import com.baomidou.mybatisplus.annotation.TableField;
import com.fasterxml.jackson.annotation.JsonFormat;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
/**
 * A project grouping jobs, owned by a user.
 */
@Data
public class JobProject {
    @ApiModelProperty("项目Id")
    private int id;
    @ApiModelProperty("项目名称")
    private String name;
    @ApiModelProperty("项目描述")
    private String description;
    @ApiModelProperty("用户Id")
    private Long userId;
    @ApiModelProperty("标记")
    private Boolean flag;
    @ApiModelProperty("创建时间")
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    private Date createTime;
    @ApiModelProperty("更新时间")
    private Date updateTime;
    // Joined display field, not mapped to a column.
    @TableField(exist=false)
    private String userName;
}

View File

@@ -0,0 +1,20 @@
package com.platform.admin.entity;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import java.util.Date;
/**
 * Heartbeat record of a registered executor instance, including basic
 * machine metrics reported with each registration refresh.
 */
@Data
public class JobRegistry {
    private int id;
    // Registry group the instance registered under.
    private String registryGroup;
    // Registry key (e.g. app name) of the instance.
    private String registryKey;
    // Registry value (e.g. instance address).
    private String registryValue;
    // Metrics reported by the instance at registration time.
    private double cpuUsage;
    private double memoryUsage;
    private double loadAverage;
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    private Date updateTime;
}

View File

@@ -0,0 +1,27 @@
package com.platform.admin.entity;
import io.swagger.annotations.ApiModelProperty;
/**
 * A user role record (id plus role name).
 */
public class JobRole {
    private int id;
    @ApiModelProperty("账号")
    private String name;
    public int getId() {
        return id;
    }
    public void setId(int id) {
        this.id = id;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
}

View File

@@ -0,0 +1,87 @@
package com.platform.admin.entity;
import com.baomidou.mybatisplus.annotation.TableField;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.util.Date;
/**
 * Reusable job template: carries the same scheduling/executor/glue settings
 * as {@code JobInfo}, minus per-run state, for stamping out new jobs.
 */
@Data
public class JobTemplate {
    @ApiModelProperty("主键ID")
    private int id;
    @ApiModelProperty("执行器主键ID")
    private int jobGroup;
    @ApiModelProperty("任务执行CRON表达式")
    private String jobCron;
    @ApiModelProperty("排序")
    private String jobDesc;
    // Creation / last-update timestamps of the template record.
    private Date addTime;
    private Date updateTime;
    @ApiModelProperty("修改用户")
    private Long userId;
    @ApiModelProperty("报警邮件")
    private String alarmEmail;
    @ApiModelProperty("执行器路由策略")
    private String executorRouteStrategy;
    @ApiModelProperty("执行器任务Handler名称")
    private String executorHandler;
    @ApiModelProperty("执行器,任务参数")
    private String executorParam;
    @ApiModelProperty("阻塞处理策略")
    private String executorBlockStrategy;
    @ApiModelProperty("任务执行超时时间,单位秒")
    private int executorTimeout;
    @ApiModelProperty("失败重试次数")
    private int executorFailRetryCount;
    @ApiModelProperty("GLUE类型\t#com.guoliang.flinkx.core.glue.GlueTypeEnum")
    private String glueType;
    @ApiModelProperty("GLUE源代码")
    private String glueSource;
    @ApiModelProperty("GLUE备注")
    private String glueRemark;
    @ApiModelProperty("GLUE更新时间")
    private Date glueUpdatetime;
    @ApiModelProperty("子任务ID")
    private String childJobId;
    @ApiModelProperty("上次调度时间")
    private long triggerLastTime;
    @ApiModelProperty("下次调度时间")
    private long triggerNextTime;
    @ApiModelProperty("flinkx运行json")
    private String jobJson;
    @ApiModelProperty("jvm参数")
    private String jvmParam;
    @ApiModelProperty("所属项目")
    private int projectId;
    // Joined display fields, not mapped to columns.
    @TableField(exist=false)
    private String projectName;
    @TableField(exist=false)
    private String userName;
}

View File

@@ -0,0 +1,75 @@
package com.platform.admin.entity;
import io.swagger.annotations.ApiModelProperty;
import org.springframework.util.StringUtils;
/**
 * Platform user account with role and per-executor-group permission list.
 */
public class JobUser {
    private int id;
    @ApiModelProperty("账号")
    private String username;
    @ApiModelProperty("密码")
    private String password;
    @ApiModelProperty("角色0-普通用户、1-管理员")
    private String role;
    @ApiModelProperty("权限执行器ID列表多个逗号分割")
    private String permission;

    public int getId() {
        return this.id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getUsername() {
        return this.username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getPassword() {
        return this.password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public String getRole() {
        return this.role;
    }

    public void setRole(String role) {
        this.role = role;
    }

    public String getPermission() {
        return this.permission;
    }

    public void setPermission(String permission) {
        this.permission = permission;
    }

    // plugin
    /**
     * Checks whether this user may operate on the given executor group:
     * administrators (role "1") always may; otherwise the group id must appear
     * in the comma-separated permission list.
     */
    public boolean validPermission(int jobGroup){
        if ("1".equals(this.role)) {
            return true;
        }
        if (!StringUtils.hasText(this.permission)) {
            return false;
        }
        String wanted = String.valueOf(jobGroup);
        for (String granted : this.permission.split(",")) {
            if (wanted.equals(granted)) {
                return true;
            }
        }
        return false;
    }
}

View File

@@ -0,0 +1,82 @@
package com.platform.admin.entity;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.userdetails.UserDetails;
import java.util.Collection;
import java.util.Collections;
/**
 * Spring Security {@link UserDetails} adapter around {@link JobUser}.
 * Accounts are always reported as non-expired, non-locked and enabled.
 */
public class JwtUser implements UserDetails {
    private Integer id;
    private String username;
    private String password;
    private Collection<? extends GrantedAuthority> authorities;

    public JwtUser() {
    }

    /**
     * Builds a JwtUser directly from a persisted {@link JobUser}; the single
     * granted authority is the user's role string.
     */
    public JwtUser(JobUser user) {
        this.id = user.getId();
        this.username = user.getUsername();
        this.password = user.getPassword();
        this.authorities = Collections.singleton(new SimpleGrantedAuthority(user.getRole()));
    }

    @Override
    public Collection<? extends GrantedAuthority> getAuthorities() {
        return this.authorities;
    }

    @Override
    public String getPassword() {
        return this.password;
    }

    @Override
    public String getUsername() {
        return this.username;
    }

    @Override
    public boolean isAccountNonExpired() {
        return true;
    }

    @Override
    public boolean isAccountNonLocked() {
        return true;
    }

    @Override
    public boolean isCredentialsNonExpired() {
        return true;
    }

    @Override
    public boolean isEnabled() {
        return true;
    }

    public Integer getId() {
        return this.id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("JwtUser{");
        sb.append("id=").append(this.id);
        sb.append(", username='").append(this.username).append('\'');
        sb.append(", password='").append(this.password).append('\'');
        sb.append(", authorities=").append(this.authorities);
        sb.append('}');
        return sb.toString();
    }
}

View File

@@ -0,0 +1,13 @@
package com.platform.admin.entity;
import lombok.Data;
/**
 * Login request payload received by the authentication filter.
 */
@Data
public class LoginUser {
    private String username;
    private String password;
    // 1 requests a long-lived ("remember me") token; other values / null do not.
    private Integer rememberMe;
}

View File

@@ -0,0 +1,22 @@
package com.platform.admin.exception;
import com.baomidou.mybatisplus.extension.api.R;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.RestControllerAdvice;
/**
* Created by jwk on 2019/07/05.
* 全局异常处理
* @author AllDataDC
*/
@Slf4j
@RestControllerAdvice
public class GlobalExceptionHandler {
@ExceptionHandler(Exception.class)
public R handleException(Exception e){
log.error("系统异常{0}",e);
return R.failed(e.getMessage());
}
}

View File

@@ -0,0 +1,25 @@
package com.platform.admin.exception;
/**
 * 自定义异常 — thrown when a presented JWT has passed its expiry time.
 * Mirrors the standard {@link Exception} constructor set.
 */
public class TokenIsExpiredException extends Exception{
    // Fix: Exception is Serializable; declare an explicit serialVersionUID so
    // serialized form stays stable across recompiles.
    private static final long serialVersionUID = 1L;

    public TokenIsExpiredException() {
    }

    /** @param message human-readable reason */
    public TokenIsExpiredException(String message) {
        super(message);
    }

    /** @param message human-readable reason
     *  @param cause underlying cause */
    public TokenIsExpiredException(String message, Throwable cause) {
        super(message, cause);
    }

    /** @param cause underlying cause */
    public TokenIsExpiredException(Throwable cause) {
        super(cause);
    }

    public TokenIsExpiredException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }
}

View File

@@ -0,0 +1,90 @@
package com.platform.admin.filter;
import com.alibaba.fastjson.JSON;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.platform.core.biz.model.ReturnT;
import com.platform.admin.core.util.I18nUtil;
import com.platform.admin.entity.JwtUser;
import com.platform.admin.entity.LoginUser;
import com.platform.admin.util.JwtTokenUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter;
import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import static com.platform.core.util.Constants.SPLIT_COMMA;
@Slf4j
public class JWTAuthenticationFilter extends UsernamePasswordAuthenticationFilter {
private ThreadLocal<Integer> rememberMe = new ThreadLocal<>();
private AuthenticationManager authenticationManager;
public JWTAuthenticationFilter(AuthenticationManager authenticationManager) {
this.authenticationManager = authenticationManager;
super.setFilterProcessesUrl("/api/auth/login");
}
@Override
public Authentication attemptAuthentication(HttpServletRequest request,
HttpServletResponse response) throws AuthenticationException {
// 从输入流中获取到登录的信息
try {
LoginUser loginUser = new ObjectMapper().readValue(request.getInputStream(), LoginUser.class);
rememberMe.set(loginUser.getRememberMe());
return authenticationManager.authenticate(
new UsernamePasswordAuthenticationToken(loginUser.getUsername(), loginUser.getPassword(), new ArrayList<>())
);
} catch (IOException e) {
logger.error("attemptAuthentication error :{}",e);
return null;
}
}
// 成功验证后调用的方法
// 如果验证成功就生成token并返回
@Override
protected void successfulAuthentication(HttpServletRequest request,
HttpServletResponse response,
FilterChain chain,
Authentication authResult) throws IOException {
JwtUser jwtUser = (JwtUser) authResult.getPrincipal();
boolean isRemember = rememberMe.get() == 1;
String role = "";
Collection<? extends GrantedAuthority> authorities = jwtUser.getAuthorities();
for (GrantedAuthority authority : authorities){
role = authority.getAuthority();
}
String token = JwtTokenUtils.createToken(jwtUser.getId(),jwtUser.getUsername(), role, isRemember);
response.setHeader("token", JwtTokenUtils.TOKEN_PREFIX + token);
response.setCharacterEncoding("UTF-8");
Map<String, Object> maps = new HashMap<>();
maps.put("data", JwtTokenUtils.TOKEN_PREFIX + token);
maps.put("roles", role.split(SPLIT_COMMA));
response.getWriter().write(JSON.toJSONString(new ReturnT<>(maps)));
}
@Override
protected void unsuccessfulAuthentication(HttpServletRequest request, HttpServletResponse response, AuthenticationException failed) throws IOException, ServletException {
response.setCharacterEncoding("UTF-8");
response.getWriter().write(JSON.toJSON(new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("login_param_invalid"))).toString());
}
}

View File

@@ -0,0 +1,71 @@
package com.platform.admin.filter;
import com.alibaba.fastjson.JSON;
import com.baomidou.mybatisplus.extension.api.R;
import com.platform.admin.exception.TokenIsExpiredException;
import com.platform.admin.util.JwtTokenUtils;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.web.authentication.www.BasicAuthenticationFilter;
import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.Collections;
public class JWTAuthorizationFilter extends BasicAuthenticationFilter {
public JWTAuthorizationFilter(AuthenticationManager authenticationManager) {
super(authenticationManager);
}
@Override
protected void doFilterInternal(HttpServletRequest request,
HttpServletResponse response,
FilterChain chain) throws IOException, ServletException {
String tokenHeader = request.getHeader(JwtTokenUtils.TOKEN_HEADER);
// 如果请求头中没有Authorization信息则直接放行
if (tokenHeader == null || !tokenHeader.startsWith(JwtTokenUtils.TOKEN_PREFIX)) {
chain.doFilter(request, response);
return;
}
// 如果请求头中有token则进行解析并且设置认证信息
try {
SecurityContextHolder.getContext().setAuthentication(getAuthentication(tokenHeader));
} catch (TokenIsExpiredException e) {
//返回json形式的错误信息
response.setCharacterEncoding("UTF-8");
response.setContentType("application/json; charset=utf-8");
response.getWriter().write(JSON.toJSONString(R.failed(e.getMessage())));
response.getWriter().flush();
return;
}
super.doFilterInternal(request, response, chain);
}
// 这里从token中获取用户信息并新建一个token
private UsernamePasswordAuthenticationToken getAuthentication(String tokenHeader) throws TokenIsExpiredException {
String token = tokenHeader.replace(JwtTokenUtils.TOKEN_PREFIX, "");
boolean expiration = JwtTokenUtils.isExpiration(token);
if (expiration) {
throw new TokenIsExpiredException("登录时间过长,请退出重新登录");
}
else {
String username = JwtTokenUtils.getUsername(token);
String role = JwtTokenUtils.getUserRole(token);
if (username != null) {
return new UsernamePasswordAuthenticationToken(username, null,
Collections.singleton(new SimpleGrantedAuthority(role))
);
}
}
return null;
}
}

View File

@@ -0,0 +1,18 @@
package com.platform.admin.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.platform.admin.entity.JobDatasource;
import org.apache.ibatis.annotations.Mapper;
/**
 * Data-access layer for the JDBC datasource configuration table.
 *
 * @author AllDataDC
 * @version v1.0
 * @date 2022-07-30
 */
@Mapper
public interface JobDatasourceMapper extends BaseMapper<JobDatasource> {
    /**
     * Updates a datasource row (custom statement in the mapper XML).
     *
     * @param datasource entity carrying the new values
     * @return number of rows affected
     */
    int update(JobDatasource datasource);
}

View File

@@ -0,0 +1,26 @@
package com.platform.admin.mapper;
import com.platform.admin.entity.JobGroup;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
 * Data-access layer for executor groups.
 */
@Mapper
public interface JobGroupMapper {
    /** Returns all executor groups. */
    List<JobGroup> findAll();
    /** Finds groups filtered by app name, title and/or address list (null-able filters per mapper XML). */
    List<JobGroup> find(@Param("appName") String appName,
                        @Param("title") String title,
                        @Param("addressList") String addressList);
    /** Inserts a group; returns affected row count. */
    int save(JobGroup jobGroup);
    /** Returns groups with the given address type (0=auto-registered, 1=manual). */
    List<JobGroup> findByAddressType(@Param("addressType") int addressType);
    /** Updates a group; returns affected row count. */
    int update(JobGroup jobGroup);
    /** Deletes a group by id; returns affected row count. */
    int remove(@Param("id") int id);
    /** Loads a single group by id. */
    JobGroup load(@Param("id") int id);
}

View File

@@ -0,0 +1,54 @@
package com.platform.admin.mapper;
import com.platform.admin.entity.JobInfo;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.Date;
import java.util.List;
/**
 * Data-access layer for scheduled job definitions.
 * Fix: normalized the one redundant {@code public} modifier (interface members
 * are implicitly public) for consistency with the other declarations.
 */
@Mapper
public interface JobInfoMapper {
    /** Pages jobs matching the given filters (offset/limit paging). */
    List<JobInfo> pageList(@Param("offset") int offset,
                           @Param("pagesize") int pagesize,
                           @Param("jobGroup") int jobGroup,
                           @Param("triggerStatus") int triggerStatus,
                           @Param("jobDesc") String jobDesc,
                           @Param("glueType") String glueType,
                           @Param("userId") int userId,
                           @Param("projectIds") Integer[] projectIds);
    /** Total row count for {@link #pageList}. */
    int pageListCount(@Param("offset") int offset,
                      @Param("pagesize") int pagesize,
                      @Param("jobGroup") int jobGroup,
                      @Param("triggerStatus") int triggerStatus,
                      @Param("jobDesc") String jobDesc,
                      @Param("glueType") String glueType,
                      @Param("userId") int userId,
                      @Param("projectIds") Integer[] projectIds);
    /** Returns all jobs. */
    List<JobInfo> findAll();
    /** Inserts a job; returns affected row count. */
    int save(JobInfo info);
    /** Loads a job by id. */
    JobInfo loadById(@Param("id") int id);
    /** Updates a job; returns affected row count. */
    int update(JobInfo jobInfo);
    /** Deletes a job by id; returns affected row count. */
    int delete(@Param("id") long id);
    /** Returns all jobs belonging to an executor group. */
    List<JobInfo> getJobsByGroup(@Param("jobGroup") int jobGroup);
    /** Total number of jobs. */
    int findAllCount();
    /** Returns jobs whose next trigger time is at or before {@code maxNextTime}. */
    List<JobInfo> scheduleJobQuery(@Param("maxNextTime") long maxNextTime, @Param("pagesize") int pagesize);
    /** Persists updated trigger status / last / next trigger times. */
    int scheduleUpdate(JobInfo xxlJobInfo);
    /** Updates the incremental-sync start time for a job. */
    int incrementTimeUpdate(@Param("id") int id, @Param("incStartTime") Date incStartTime);
    /** Records the result code of the most recent run. */
    int updateLastHandleCode(@Param("id") int id,@Param("lastHandleCode")int lastHandleCode);
    /** Updates the incremental-sync start id for a job. */
    void incrementIdUpdate(@Param("id") int id, @Param("incStartId")Long incStartId);
}

View File

@@ -0,0 +1,21 @@
package com.platform.admin.mapper;
import com.platform.admin.entity.JobLogGlue;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
 * Data-access layer for GLUE (script) source history entries.
 */
@Mapper
public interface JobLogGlueMapper {
    /** Inserts a history entry; returns affected row count. */
    int save(JobLogGlue jobLogGlue);
    /** Returns all history entries of a job. */
    List<JobLogGlue> findByJobId(@Param("jobId") int jobId);
    /** Removes old entries of a job beyond the newest {@code limit}; returns rows deleted. */
    int removeOld(@Param("jobId") int jobId, @Param("limit") int limit);
    /** Deletes all history entries of a job; returns rows deleted. */
    int deleteByJobId(@Param("jobId") int jobId);
}

View File

@@ -0,0 +1,61 @@
package com.platform.admin.mapper;
import com.platform.admin.entity.JobLog;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
 * Data-access layer for job execution logs.
 */
@Mapper
public interface JobLogMapper {
    // exist jobId not use jobGroup, not exist use jobGroup
    /** Pages logs; when jobId is given it takes precedence over jobGroup (see mapper XML). */
    List<JobLog> pageList(@Param("offset") int offset,
                          @Param("pagesize") int pagesize,
                          @Param("jobGroup") int jobGroup,
                          @Param("jobId") int jobId,
                          @Param("triggerTimeStart") Date triggerTimeStart,
                          @Param("triggerTimeEnd") Date triggerTimeEnd,
                          @Param("logStatus") int logStatus);
    /** Total row count for {@link #pageList}. */
    int pageListCount(@Param("offset") int offset,
                      @Param("pagesize") int pagesize,
                      @Param("jobGroup") int jobGroup,
                      @Param("jobId") int jobId,
                      @Param("triggerTimeStart") Date triggerTimeStart,
                      @Param("triggerTimeEnd") Date triggerTimeEnd,
                      @Param("logStatus") int logStatus);
    /** Loads a log entry by id. */
    JobLog load(@Param("id") long id);
    /** Inserts a log entry; returns the generated id (per mapper XML key generation). */
    long save(JobLog jobLog);
    /** Persists the trigger-phase result fields. */
    int updateTriggerInfo(JobLog jobLog);
    /** Persists the handle-phase result fields. */
    int updateHandleInfo(JobLog jobLog);
    /** Stores the external process id of a running job. */
    int updateProcessId(@Param("id") long id,
                        @Param("processId") String processId);
    /** Deletes all logs of a job; returns rows deleted. */
    int delete(@Param("jobId") int jobId);
    /** Aggregates run counts in the [from, to) window for the report. */
    Map<String, Object> findLogReport(@Param("from") Date from,
                                      @Param("to") Date to);
    /** Finds ids of logs eligible for cleanup by age and/or retained count. */
    List<Long> findClearLogIds(@Param("jobGroup") int jobGroup,
                               @Param("jobId") int jobId,
                               @Param("clearBeforeTime") Date clearBeforeTime,
                               @Param("clearBeforeNum") int clearBeforeNum,
                               @Param("pagesize") int pagesize);
    /** Deletes the given log ids; returns rows deleted. */
    int clearLog(@Param("logIds") List<Long> logIds);
    /** Finds ids of failed runs pending alarm processing. */
    List<Long> findFailJobLogIds(@Param("pagesize") int pagesize);
    /** CAS-style alarm-status transition; returns rows affected (0 when the old status did not match). */
    int updateAlarmStatus(@Param("logId") long logId,
                          @Param("oldAlarmStatus") int oldAlarmStatus,
                          @Param("newAlarmStatus") int newAlarmStatus);
}

View File

@@ -0,0 +1,22 @@
package com.platform.admin.mapper;
import com.platform.admin.entity.JobLogReport;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.Date;
import java.util.List;
/**
 * Data-access layer for per-day run-outcome aggregates.
 */
@Mapper
public interface JobLogReportMapper {
    /** Inserts a day's aggregate; returns affected row count. */
    int save(JobLogReport xxlJobLogReport);
    /** Updates a day's aggregate; returns affected row count. */
    int update(JobLogReport xxlJobLogReport);
    /** Returns the aggregates for each day in [triggerDayFrom, triggerDayTo]. */
    List<JobLogReport> queryLogReport(@Param("triggerDayFrom") Date triggerDayFrom,
                                      @Param("triggerDayTo") Date triggerDayTo);
    /** Returns the all-time totals as a single aggregate row. */
    JobLogReport queryLogReportTotal();
}

View File

@@ -0,0 +1,26 @@
package com.platform.admin.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.platform.admin.entity.JobProject;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
/**
 * Project
 * Data-access layer for projects.
 *
 * @author AllDataDC
 * @version v2.1.12
 * @date 2022-05-24
 */
@Mapper
public interface JobProjectMapper extends BaseMapper<JobProject> {
    /**
     * Pages projects, optionally filtered by name.
     *
     * @param page MyBatis-Plus page descriptor (page number / size)
     * @param searchName name filter; semantics (exact vs. like) defined in the mapper XML
     * @return the requested page of projects
     */
    IPage<JobProject> getProjectListPaging(IPage<JobProject> page,
                                           @Param("searchName") String searchName);
}

View File

@@ -0,0 +1,43 @@
package com.platform.admin.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.platform.admin.entity.JobRegistry;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.Date;
import java.util.List;
/**
 * Data-access layer for executor heartbeat/registry records.
 * Fix: dropped the redundant {@code public} modifiers — interface members are
 * implicitly public — matching the style of the other mappers in this package.
 */
@Mapper
public interface JobRegistryMapper extends BaseMapper<JobRegistry> {
    /** Returns ids of registrations not refreshed within {@code timeout} seconds of {@code nowTime}. */
    List<Integer> findDead(@Param("timeout") int timeout,
                           @Param("nowTime") Date nowTime);
    /** Deletes the given dead registrations; returns rows deleted. */
    int removeDead(@Param("ids") List<Integer> ids);
    /** Returns all registrations still alive within the timeout window. */
    List<JobRegistry> findAll(@Param("timeout") int timeout,
                              @Param("nowTime") Date nowTime);
    /** Refreshes an existing registration (heartbeat + metrics); returns rows affected (0 when absent). */
    int registryUpdate(@Param("registryGroup") String registryGroup,
                       @Param("registryKey") String registryKey,
                       @Param("registryValue") String registryValue,
                       @Param("cpuUsage") double cpuUsage,
                       @Param("memoryUsage") double memoryUsage,
                       @Param("loadAverage") double loadAverage,
                       @Param("updateTime") Date updateTime);
    /** Inserts a new registration; returns rows affected. */
    int registrySave(@Param("registryGroup") String registryGroup,
                     @Param("registryKey") String registryKey,
                     @Param("registryValue") String registryValue,
                     @Param("cpuUsage") double cpuUsage,
                     @Param("memoryUsage") double memoryUsage,
                     @Param("loadAverage") double loadAverage,
                     @Param("updateTime") Date updateTime);
    /** Removes a registration identified by group/key/value; returns rows deleted. */
    int registryDelete(@Param("registryGroup") String registryGroup,
                       @Param("registryKey") String registryKey,
                       @Param("registryValue") String registryValue);
}

View File

@@ -0,0 +1,36 @@
package com.platform.admin.mapper;
import com.platform.admin.entity.JobTemplate;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
 * Data-access layer for job templates.
 * Fix: dropped the redundant {@code public} modifiers — interface members are
 * implicitly public — matching the style of the other mappers in this package.
 */
@Mapper
public interface JobTemplateMapper {
    /** Pages templates matching the given filters (offset/limit paging). */
    List<JobTemplate> pageList(@Param("offset") int offset,
                               @Param("pagesize") int pagesize,
                               @Param("jobGroup") int jobGroup,
                               @Param("jobDesc") String jobDesc,
                               @Param("executorHandler") String executorHandler,
                               @Param("userId") int userId,
                               @Param("projectIds") Integer[] projectIds);
    /** Total row count for {@link #pageList}. */
    int pageListCount(@Param("offset") int offset,
                      @Param("pagesize") int pagesize,
                      @Param("jobGroup") int jobGroup,
                      @Param("jobDesc") String jobDesc,
                      @Param("executorHandler") String executorHandler,
                      @Param("userId") int userId,
                      @Param("projectIds") Integer[] projectIds);
    /** Inserts a template; returns affected row count. */
    int save(JobTemplate info);
    /** Loads a template by id. */
    JobTemplate loadById(@Param("id") int id);
    /** Updates a template; returns affected row count. */
    int update(JobTemplate jobTemplate);
    /** Deletes a template by id; returns affected row count. */
    int delete(@Param("id") long id);
}

View File

@@ -0,0 +1,37 @@
package com.platform.admin.mapper;
import com.platform.admin.entity.JobUser;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.springframework.stereotype.Repository;
import java.util.List;
/**
 * Data-access layer for user accounts.
 * NOTE(review): the file imports Spring's {@code @Repository} but never uses it;
 * {@code @Mapper} alone registers this interface.
 */
@Mapper
public interface JobUserMapper {
    /** Pages users, optionally filtered by username. */
    List<JobUser> pageList(@Param("offset") int offset,
                           @Param("pagesize") int pagesize,
                           @Param("username") String username);
    /** Returns all users, optionally filtered by username. */
    List<JobUser> findAll(@Param("username") String username);
    /** Total row count for {@link #pageList}. */
    int pageListCount(@Param("offset") int offset,
                      @Param("pagesize") int pagesize,
                      @Param("username") String username);
    /** Loads a user by login name. */
    JobUser loadByUserName(@Param("username") String username);
    /** Loads a user by id. */
    JobUser getUserById(@Param("id") int id);
    /** Loads users whose ids are in the given list. */
    List<JobUser> getUsersByIds(@Param("ids") String[] ids);
    /** Inserts a user; returns affected row count. */
    int save(JobUser jobUser);
    /** Updates a user; returns affected row count. */
    int update(JobUser jobUser);
    /** Deletes a user by id; returns affected row count. */
    int delete(@Param("id") int id);
}

Some files were not shown because too many files have changed in this diff Show More