This commit is contained in:
Jane
2023-12-22 10:59:10 +08:00
parent 751c43e199
commit d1ede2d4aa
2774 changed files with 291509 additions and 0 deletions

View File

@@ -0,0 +1,64 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <artifactId>modules</artifactId>
        <groupId>com.platform</groupId>
        <version>0.4.x</version>
    </parent>
    <groupId>com.platform</groupId>
    <artifactId>service-data-dts-parent</artifactId>
    <version>0.4.x</version>
    <name>service-data-dts-parent</name>
    <packaging>pom</packaging>
    <modules>
        <module>service-data-dts</module>
        <module>service-data-core</module>
        <module>service-data-rpc</module>
    </modules>
    <properties>
        <!-- build / encoding settings -->
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
        <maven.compiler.encoding>UTF-8</maven.compiler.encoding>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
        <maven.test.skip>true</maven.test.skip>
        <!-- dependency versions shared by child modules.
             FIX: <junit.version> was declared twice; the duplicate is removed.
             NOTE(review): fastjson 1.2.70 and mysql-connector 5.1.x have known
             published CVEs - consider upgrading; verify against deployment policy. -->
        <commons-lang3.version>3.3.2</commons-lang3.version>
        <fastjson.version>1.2.70</fastjson.version>
        <slf4j-api.version>1.7.28</slf4j-api.version>
        <logback-classic.version>1.2.2</logback-classic.version>
        <commons-io.version>2.4</commons-io.version>
        <junit.version>4.12</junit.version>
        <hutool.version>5.4.3</hutool.version>
        <postgresql.version>42.2.5</postgresql.version>
        <mysql-connector.version>5.1.47</mysql-connector.version>
        <jna.version>5.8.0</jna.version>
        <groovy.version>2.5.8</groovy.version>
        <mybatisplus.version>3.3.1</mybatisplus.version>
        <swagger.version>2.9.2</swagger.version>
        <swagger-models.version>1.5.21</swagger-models.version>
        <spring.version>4.3.25.RELEASE</spring.version>
        <spring-boot.version>2.3.5.RELEASE</spring-boot.version>
        <swagger-bootstrap-ui.version>1.9.6</swagger-bootstrap-ui.version>
        <jjwt.version>0.9.0</jjwt.version>
        <netty.version>4.1.43.Final</netty.version>
        <hessian.version>4.0.63</hessian.version>
        <hadoop.version>2.7.3</hadoop.version>
        <hive.jdbc.version>2.3.1</hive.jdbc.version>
        <hbase.version>1.3.5</hbase.version>
        <mongo-java-driver.version>3.4.2</mongo-java-driver.version>
        <oshi.core.version>3.5.0</oshi.core.version>
        <phoenix.version>5.0.0-HBase-2.0</phoenix.version>
    </properties>
</project>

View File

@@ -0,0 +1,95 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <artifactId>service-data-dts-parent</artifactId>
        <groupId>com.platform</groupId>
        <version>0.4.x</version>
    </parent>
    <artifactId>service-data-core</artifactId>
    <packaging>jar</packaging>
    <properties>
        <java.version>1.8</java.version>
    </properties>
    <dependencies>
        <!-- sibling module: shared RPC layer; version tracks the parent -->
        <dependency>
            <groupId>com.platform</groupId>
            <artifactId>service-data-rpc</artifactId>
            <version>${project.parent.version}</version>
        </dependency>
        <!-- groovy core (not groovy-all): used by GlueFactory to compile glue job sources -->
        <dependency>
            <groupId>org.codehaus.groovy</groupId>
            <artifactId>groovy</artifactId>
            <version>${groovy.version}</version>
        </dependency>
        <!-- spring-context: provided scope - supplied by the consuming application -->
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-context</artifactId>
            <version>${spring.version}</version>
            <scope>provided</scope>
        </dependency>
        <!-- junit (test scope only) -->
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>${junit.version}</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>net.java.dev.jna</groupId>
            <artifactId>jna</artifactId>
            <version>${jna.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
            <version>${commons-lang3.version}</version>
        </dependency>
        <dependency>
            <groupId>cn.hutool</groupId>
            <artifactId>hutool-all</artifactId>
            <version>${hutool.version}</version>
        </dependency>
        <!-- oshi-core: jna/slf4j excluded to avoid version clashes with the
             explicit jna dependency above and the platform's slf4j binding -->
        <dependency>
            <groupId>com.github.oshi</groupId>
            <artifactId>oshi-core</artifactId>
            <version>${oshi.core.version}</version>
            <exclusions>
                <exclusion>
                    <artifactId>jna</artifactId>
                    <groupId>net.java.dev.jna</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>slf4j-api</artifactId>
                    <groupId>org.slf4j</groupId>
                </exclusion>
            </exclusions>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.8.1</version>
                <configuration>
                    <source>${java.version}</source>
                    <target>${java.version}</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>

View File

@@ -0,0 +1,50 @@
package com.platform.core.biz;
import com.platform.core.biz.model.HandleCallbackParam;
import com.platform.core.biz.model.HandleProcessCallbackParam;
import com.platform.core.biz.model.RegistryParam;
import com.platform.core.biz.model.ReturnT;
import java.util.List;
/**
 * Admin-side API exposed to executors: execution-result callbacks and
 * executor (de-)registration. Implemented over HTTP by {@code AdminBizClient}.
 */
public interface AdminBiz {
    // ---------------------- callback ----------------------

    /**
     * Report finished job runs back to the scheduling center.
     *
     * @param callbackParamList one entry per finished run (log id, trigger time, result)
     * @return SUCCESS when the callbacks were accepted, FAIL otherwise
     */
    ReturnT<String> callback(List<HandleCallbackParam> callbackParamList);

    /**
     * Report process-level callbacks (log id + process id) to the scheduling center.
     *
     * @param processCallbackParamList one entry per process callback
     * @return SUCCESS when accepted, FAIL otherwise
     */
    ReturnT<String> processCallback(List<HandleProcessCallbackParam> processCallbackParamList);

    // ---------------------- registry ----------------------

    /**
     * Register (or heartbeat-refresh) an executor with the scheduling center.
     *
     * @param registryParam registry group/key/value plus machine load metrics
     * @return SUCCESS when the registration was recorded
     */
    ReturnT<String> registry(RegistryParam registryParam);

    /**
     * Remove an executor registration (called on executor shutdown).
     *
     * @param registryParam the registration to remove (group/key/value)
     * @return SUCCESS when the registration was removed
     */
    ReturnT<String> registryRemove(RegistryParam registryParam);
}

View File

@@ -0,0 +1,49 @@
package com.platform.core.biz;
import com.platform.core.biz.model.LogResult;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
/**
 * Executor-side API invoked remotely by the scheduling center: liveness
 * probes, job kill, log reading and job triggering.
 */
public interface ExecutorBiz {
    /**
     * Liveness heartbeat.
     *
     * @return SUCCESS while the executor process is alive
     */
    ReturnT<String> beat();

    /**
     * Busy-aware heartbeat: fails when the given job's thread is running or
     * has queued triggers, so routing can prefer an idle executor.
     *
     * @param jobId id of the job to probe
     * @return SUCCESS when idle, FAIL when running or queued
     */
    ReturnT<String> idleBeat(int jobId);

    /**
     * Kill the job's worker thread (a fresh one is created on the next trigger).
     *
     * @param jobId id of the job to kill
     * @return SUCCESS (also when the thread was already gone)
     */
    ReturnT<String> kill(int jobId);

    /**
     * Read a page of a job run's log file.
     *
     * @param logDateTim  trigger time in epoch milliseconds (selects the daily log directory)
     * @param logId       id of the job log entry (selects the log file)
     * @param fromLineNum first line to read (1-based)
     * @return the requested log lines plus pagination state
     */
    ReturnT<LogResult> log(long logDateTim, long logId, int fromLineNum);

    /**
     * Trigger a job run on this executor.
     *
     * @param triggerParam full trigger context (job id, handler, glue, block strategy, ...)
     * @return SUCCESS when the trigger was queued, FAIL otherwise
     */
    ReturnT<String> run(TriggerParam triggerParam);
}

View File

@@ -0,0 +1,49 @@
package com.platform.core.biz.client;
import com.platform.core.biz.model.HandleCallbackParam;
import com.platform.core.biz.model.HandleProcessCallbackParam;
import com.platform.core.biz.model.RegistryParam;
import com.platform.core.util.JobRemotingUtil;
import com.platform.core.biz.AdminBiz;
import com.platform.core.biz.model.ReturnT;
import java.util.List;
/**
 * HTTP client implementation of {@link AdminBiz}: forwards callback and
 * registry requests to the scheduling-center admin API.
 */
public class AdminBizClient implements AdminBiz {
    /** Seconds allowed for each admin API call. */
    private static final int TIMEOUT_SECONDS = 3;

    /** Base URL of the admin server; always normalized to end with '/'. */
    private String addressUrl;
    /** Access token attached to every request (may be empty). */
    private String accessToken;

    public AdminBizClient() {
    }

    public AdminBizClient(String addressUrl, String accessToken) {
        // normalize so that relative API paths can be appended directly
        this.addressUrl = addressUrl.endsWith("/") ? addressUrl : addressUrl + "/";
        this.accessToken = accessToken;
    }

    /** POST the given body to {@code addressUrl + path} with the configured token. */
    private ReturnT<String> post(String path, Object requestBody) {
        return JobRemotingUtil.postBody(addressUrl + path, accessToken, requestBody, TIMEOUT_SECONDS);
    }

    @Override
    public ReturnT<String> callback(List<HandleCallbackParam> callbackParamList) {
        return post("api/callback", callbackParamList);
    }

    @Override
    public ReturnT<String> processCallback(List<HandleProcessCallbackParam> callbackParamList) {
        return post("api/processCallback", callbackParamList);
    }

    @Override
    public ReturnT<String> registry(RegistryParam registryParam) {
        return post("api/registry", registryParam);
    }

    @Override
    public ReturnT<String> registryRemove(RegistryParam registryParam) {
        return post("api/registryRemove", registryParam);
    }
}

View File

@@ -0,0 +1,167 @@
package com.platform.core.biz.impl;
import com.platform.core.biz.ExecutorBiz;
import com.platform.core.biz.model.LogResult;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.core.enums.ExecutorBlockStrategyEnum;
import com.platform.core.glue.GlueFactory;
import com.platform.core.glue.GlueTypeEnum;
import com.platform.core.log.JobFileAppender;
import com.platform.core.thread.JobThread;
import com.platform.core.executor.JobExecutor;
import com.platform.core.handler.IJobHandler;
import com.platform.core.handler.impl.GlueJobHandler;
import com.platform.core.handler.impl.ScriptJobHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Date;
/**
 * Executor-side implementation of {@link ExecutorBiz}. The scheduling center
 * calls these methods remotely to probe the executor, kill jobs, read job
 * logs and trigger job runs. The {@code run} method decides whether the
 * existing job thread/handler can be reused or must be replaced.
 */
public class ExecutorBizImpl implements ExecutorBiz {
    private static Logger logger = LoggerFactory.getLogger(ExecutorBizImpl.class);

    /** Liveness heartbeat: always succeeds while this process is up. */
    @Override
    public ReturnT<String> beat() {
        return ReturnT.SUCCESS;
    }

    /** Busy-aware heartbeat: FAIL when the job thread is running or has queued triggers. */
    @Override
    public ReturnT<String> idleBeat(int jobId) {
        // isRunningOrHasQueue
        JobThread jobThread = JobExecutor.loadJobThread(jobId);
        if (jobThread != null && jobThread.isRunningOrHasQueue()) {
            return new ReturnT<>(ReturnT.FAIL_CODE, "job thread is running or has trigger queue.");
        }
        return ReturnT.SUCCESS;
    }

    /** Kill the job's worker thread; a new thread is created on the next trigger. */
    @Override
    public ReturnT<String> kill(int jobId) {
        // kill handlerThread, and create new one
        JobThread jobThread = JobExecutor.loadJobThread(jobId);
        if (jobThread != null) {
            JobExecutor.removeJobThread(jobId, "scheduling center kill job.");
            return ReturnT.SUCCESS;
        }
        return new ReturnT<>(ReturnT.SUCCESS_CODE, "job thread already killed.");
    }

    /** Read one page of a run's log; logDateTim (epoch millis) selects the daily directory. */
    @Override
    public ReturnT<LogResult> log(long logDateTim, long logId, int fromLineNum) {
        // log filename: logPath/yyyy-MM-dd/9999.log
        String logFileName = JobFileAppender.makeLogFileName(new Date(logDateTim), logId);
        LogResult logResult = JobFileAppender.readLog(logFileName, fromLineNum);
        return new ReturnT<>(logResult);
    }

    /**
     * Trigger a job run. Per glue type, validates whether the cached job
     * thread/handler are still current; stale ones are discarded (and the old
     * thread killed) before the trigger is queued. Applies the configured
     * block strategy when the thread is busy.
     */
    @Override
    public ReturnT<String> run(TriggerParam triggerParam) {
        // load old jobHandler + jobThread
        JobThread jobThread = JobExecutor.loadJobThread(triggerParam.getJobId());
        IJobHandler jobHandler = jobThread != null ? jobThread.getHandler() : null;
        String removeOldReason = null;

        // valid jobHandler + jobThread, per glue type
        GlueTypeEnum glueTypeEnum = GlueTypeEnum.match(triggerParam.getGlueType());
        if (GlueTypeEnum.BEAN == glueTypeEnum) {
            // new jobhandler: looked up by name in the executor's registry
            IJobHandler newJobHandler = JobExecutor.loadJobHandler(triggerParam.getExecutorHandler());
            // valid old jobThread: identity change means the handler was re-registered
            if (jobThread != null && jobHandler != newJobHandler) {
                // change handler, need kill old thread
                removeOldReason = "change jobhandler or glue type, and terminate the old job thread.";
                jobThread = null;
                jobHandler = null;
            }
            // valid handler
            if (jobHandler == null) {
                jobHandler = newJobHandler;
                if (jobHandler == null) {
                    return new ReturnT<>(ReturnT.FAIL_CODE, "job handler [" + triggerParam.getExecutorHandler() + "] not found.");
                }
            }
        } else if (GlueTypeEnum.GLUE_GROOVY == glueTypeEnum) {
            // valid old jobThread: glue update time is the staleness marker
            if (jobThread != null &&
                    !(jobThread.getHandler() instanceof GlueJobHandler
                            && ((GlueJobHandler) jobThread.getHandler()).getGlueUpdatetime() == triggerParam.getGlueUpdatetime())) {
                // change handler or gluesource updated, need kill old thread
                removeOldReason = "change job source or glue type, and terminate the old job thread.";
                jobThread = null;
                jobHandler = null;
            }
            // valid handler: compile the groovy source into a fresh handler
            if (jobHandler == null) {
                try {
                    IJobHandler originJobHandler = GlueFactory.getInstance().loadNewInstance(triggerParam.getGlueSource());
                    jobHandler = new GlueJobHandler(originJobHandler, triggerParam.getGlueUpdatetime());
                } catch (Exception e) {
                    logger.error(e.getMessage(), e);
                    return new ReturnT<String>(ReturnT.FAIL_CODE, e.getMessage());
                }
            }
        } else if (glueTypeEnum != null && glueTypeEnum.isScript()) {
            // valid old jobThread: same staleness check for script handlers
            if (jobThread != null &&
                    !(jobThread.getHandler() instanceof ScriptJobHandler
                            && ((ScriptJobHandler) jobThread.getHandler()).getGlueUpdatetime() == triggerParam.getGlueUpdatetime())) {
                // change script or gluesource updated, need kill old thread
                removeOldReason = "change job source or glue type, and terminate the old job thread.";
                jobThread = null;
                jobHandler = null;
            }
            // valid handler
            if (jobHandler == null) {
                jobHandler = new ScriptJobHandler(triggerParam.getJobId(), triggerParam.getGlueUpdatetime(), triggerParam.getGlueSource(), GlueTypeEnum.match(triggerParam.getGlueType()));
            }
        } else {
            return new ReturnT<>(ReturnT.FAIL_CODE, "glueType[" + triggerParam.getGlueType() + "] is not valid.");
        }

        // executor block strategy: only relevant when an existing thread survived validation
        if (jobThread != null) {
            ExecutorBlockStrategyEnum blockStrategy = ExecutorBlockStrategyEnum.match(triggerParam.getExecutorBlockStrategy(), null);
            if (ExecutorBlockStrategyEnum.DISCARD_LATER == blockStrategy) {
                // discard when running
                if (jobThread.isRunningOrHasQueue()) {
                    return new ReturnT<>(ReturnT.FAIL_CODE, "block strategy effect" + ExecutorBlockStrategyEnum.DISCARD_LATER.getTitle());
                }
            } else if (ExecutorBlockStrategyEnum.COVER_EARLY == blockStrategy) {
                // kill running jobThread; registJobThread below replaces it
                if (jobThread.isRunningOrHasQueue()) {
                    removeOldReason = "block strategy effect" + ExecutorBlockStrategyEnum.COVER_EARLY.getTitle();
                    jobThread = null;
                }
            } else {
                // just queue trigger (serial execution / default)
            }
        }

        // replace thread (new or exists invalid); registJobThread stops any old one
        if (jobThread == null) {
            jobThread = JobExecutor.registJobThread(triggerParam.getJobId(), jobHandler, removeOldReason);
        }

        // push data to queue
        ReturnT<String> pushResult = jobThread.pushTriggerQueue(triggerParam);
        return pushResult;
    }
}

View File

@@ -0,0 +1,53 @@
package com.platform.core.biz.model;
import java.io.Serializable;
/**
 * Callback payload sent from the executor back to the scheduling center
 * after a job run: identifies the log entry and carries the execution result.
 * Field names are kept stable for serialization compatibility.
 */
public class HandleCallbackParam implements Serializable {
    private static final long serialVersionUID = 42L;

    private long logId;                    // id of the job log entry
    private long logDateTim;               // trigger time (epoch millis)
    private ReturnT<String> executeResult; // outcome reported by the job handler

    public HandleCallbackParam() {
    }

    public HandleCallbackParam(long logId, long logDateTim, ReturnT<String> executeResult) {
        this.logId = logId;
        this.logDateTim = logDateTim;
        this.executeResult = executeResult;
    }

    public long getLogId() {
        return logId;
    }

    public void setLogId(long logId) {
        this.logId = logId;
    }

    public long getLogDateTim() {
        return logDateTim;
    }

    public void setLogDateTim(long logDateTim) {
        this.logDateTim = logDateTim;
    }

    public ReturnT<String> getExecuteResult() {
        return executeResult;
    }

    public void setExecuteResult(ReturnT<String> executeResult) {
        this.executeResult = executeResult;
    }

    @Override
    public String toString() {
        return "HandleCallbackParam{"
                + "logId=" + logId
                + ", logDateTim=" + logDateTim
                + ", executeResult=" + executeResult
                + '}';
    }
}

View File

@@ -0,0 +1,54 @@
package com.platform.core.biz.model;
import java.io.Serializable;
/**
 * Process-level callback payload sent from the executor to the scheduling
 * center: identifies the job log entry and the external process it spawned.
 */
public class HandleProcessCallbackParam implements Serializable {
    private static final long serialVersionUID = 42L;

    private long logId;         // id of the job log entry
    private String processId;   // id of the spawned process being reported
    private long logDateTime;   // trigger time (epoch millis)

    public HandleProcessCallbackParam() {
    }

    public HandleProcessCallbackParam(long logId, long logDateTime, String processId) {
        this.logId = logId;
        this.processId = processId;
        this.logDateTime = logDateTime;
    }

    public long getLogId() {
        return logId;
    }

    public void setLogId(long logId) {
        this.logId = logId;
    }

    public String getProcessId() {
        return processId;
    }

    public void setProcessId(String processId) {
        this.processId = processId;
    }

    public long getLogDateTime() {
        return logDateTime;
    }

    public void setLogDateTime(long logDateTime) {
        this.logDateTime = logDateTime;
    }

    @Override
    public String toString() {
        // FIX: previously printed "HandleCallbackParam{" (copy-paste from the
        // sibling class), which made log output identify the wrong type.
        return "HandleProcessCallbackParam{" +
                "logId=" + logId +
                ", processId=" + processId +
                ", logDateTime=" + logDateTime +
                '}';
    }
}

View File

@@ -0,0 +1,52 @@
package com.platform.core.biz.model;
import java.io.Serializable;
/**
 * One page of a job run's log file: the requested line range, its content,
 * and whether the end of the file was reached.
 */
public class LogResult implements Serializable {
    private static final long serialVersionUID = 42L;

    private int fromLineNum;   // first line of this page (1-based)
    private int toLineNum;     // last line of this page
    private String logContent; // the log text for the range
    private boolean isEnd;     // true when the file has no further lines

    public LogResult(int fromLineNum, int toLineNum, String logContent, boolean isEnd) {
        this.fromLineNum = fromLineNum;
        this.toLineNum = toLineNum;
        this.logContent = logContent;
        this.isEnd = isEnd;
    }

    public int getFromLineNum() {
        return fromLineNum;
    }

    public void setFromLineNum(int fromLineNum) {
        this.fromLineNum = fromLineNum;
    }

    public int getToLineNum() {
        return toLineNum;
    }

    public void setToLineNum(int toLineNum) {
        this.toLineNum = toLineNum;
    }

    public String getLogContent() {
        return logContent;
    }

    public void setLogContent(String logContent) {
        this.logContent = logContent;
    }

    public boolean isEnd() {
        return isEnd;
    }

    public void setEnd(boolean end) {
        this.isEnd = end;
    }
}

View File

@@ -0,0 +1,92 @@
package com.platform.core.biz.model;
import java.io.Serializable;
/**
 * Executor registration payload: registry group/key/value identifying the
 * executor, plus machine load metrics used for load-aware routing.
 */
public class RegistryParam implements Serializable {
    private static final long serialVersionUID = 42L;

    private String registryGroup;  // e.g. EXECUTOR or ADMIN (see RegistryConfig.RegistType)
    private String registryKey;    // logical name (app name)
    private String registryValue;  // network address (ip:port)
    private double cpuUsage;       // machine load metrics reported with the heartbeat
    private double memoryUsage;
    private double loadAverage;

    public RegistryParam() {
    }

    public RegistryParam(String registryGroup, String registryKey, String registryValue) {
        this.registryGroup = registryGroup;
        this.registryKey = registryKey;
        this.registryValue = registryValue;
    }

    public RegistryParam(String registryGroup, String registryKey, String registryValue, double cpuUsage, double memoryUsage, double loadAverage) {
        this.registryGroup = registryGroup;
        this.registryKey = registryKey;
        this.registryValue = registryValue;
        this.cpuUsage = cpuUsage;
        this.memoryUsage = memoryUsage;
        this.loadAverage = loadAverage;
    }

    public String getRegistryGroup() {
        return registryGroup;
    }

    public void setRegistryGroup(String registryGroup) {
        this.registryGroup = registryGroup;
    }

    public String getRegistryKey() {
        return registryKey;
    }

    public void setRegistryKey(String registryKey) {
        this.registryKey = registryKey;
    }

    public String getRegistryValue() {
        return registryValue;
    }

    public void setRegistryValue(String registryValue) {
        this.registryValue = registryValue;
    }

    public double getCpuUsage() {
        return cpuUsage;
    }

    public void setCpuUsage(double cpuUsage) {
        this.cpuUsage = cpuUsage;
    }

    public double getMemoryUsage() {
        return memoryUsage;
    }

    public void setMemoryUsage(double memoryUsage) {
        this.memoryUsage = memoryUsage;
    }

    public double getLoadAverage() {
        return loadAverage;
    }

    public void setLoadAverage(double loadAverage) {
        this.loadAverage = loadAverage;
    }

    @Override
    public String toString() {
        // FIX: the double-valued metrics were previously quoted like strings
        // (cpuUsage='0.5''); numeric fields are now printed unquoted.
        return "RegistryParam{" +
                "registryGroup='" + registryGroup + '\'' +
                ", registryKey='" + registryKey + '\'' +
                ", registryValue='" + registryValue + '\'' +
                ", cpuUsage=" + cpuUsage +
                ", memoryUsage=" + memoryUsage +
                ", loadAverage=" + loadAverage +
                '}';
    }
}

View File

@@ -0,0 +1,53 @@
package com.platform.core.biz.model;
import java.io.Serializable;
/**
 * Generic RPC result envelope: an HTTP-style status code, an optional
 * message, and an optional typed payload.
 *
 * @param <T> payload type
 */
public class ReturnT<T> implements Serializable {
    public static final long serialVersionUID = 42L;

    public static final int SUCCESS_CODE = 200;
    public static final int FAIL_CODE = 500;

    /** Shared success result with no payload. */
    public static final ReturnT<String> SUCCESS = new ReturnT<>(null);
    /** Shared failure result with no message. */
    public static final ReturnT<String> FAIL = new ReturnT<>(FAIL_CODE, null);

    private int code;      // SUCCESS_CODE or FAIL_CODE
    private String msg;    // optional human-readable detail
    private T content;     // optional payload

    public ReturnT() {
    }

    public ReturnT(int code, String msg) {
        this.code = code;
        this.msg = msg;
    }

    /** Success result carrying the given payload. */
    public ReturnT(T content) {
        this.code = SUCCESS_CODE;
        this.content = content;
    }

    public int getCode() {
        return code;
    }

    public void setCode(int code) {
        this.code = code;
    }

    public String getMsg() {
        return msg;
    }

    public void setMsg(String msg) {
        this.msg = msg;
    }

    public T getContent() {
        return content;
    }

    public void setContent(T content) {
        this.content = content;
    }

    @Override
    public String toString() {
        return "ReturnT [code=" + code + ", msg=" + msg + ", content=" + content + "]";
    }
}

View File

@@ -0,0 +1,257 @@
package com.platform.core.biz.model;
import java.io.Serializable;
import java.util.Date;
/**
 * Full trigger context sent from the scheduling center to an executor for a
 * single job run. Pure data carrier; field names are part of the wire format.
 */
public class TriggerParam implements Serializable {
    private static final long serialVersionUID = 42L;

    private int jobId;                    // id of the job to run
    private String executorHandler;       // BEAN-mode handler name to look up on the executor
    private String executorParams;        // free-form params passed to the handler
    private String executorBlockStrategy; // name of ExecutorBlockStrategyEnum constant
    private int executorTimeout;          // run timeout; units not visible here — presumably seconds, confirm against caller
    private long logId;                   // id of the job log entry for this run
    private long logDateTime;             // trigger time (epoch millis)
    private String glueType;              // name of GlueTypeEnum constant (BEAN / GLUE_GROOVY / script)
    private String glueSource;            // glue/script source code for non-BEAN modes
    private long glueUpdatetime;          // last-modified marker used to detect stale cached handlers
    private int broadcastIndex;           // this executor's index in a broadcast trigger
    private int broadcastTotal;           // total executors in the broadcast
    private String jobJson;               // serialized job definition; schema not visible here — verify against producer
    private String processId;             // external process id, for process callbacks
    private String replaceParam;          // incremental-run replacement parameter
    private String jvmParam;              // extra JVM arguments for spawned processes
    private Date startTime;               // incremental window start
    private Date triggerTime;             // time this trigger fired
    private String partitionInfo;         // hive partition info (IncrementTypeEnum.PARTITION)
    private long startId;                 // id-range start (IncrementTypeEnum.ID)
    private long endId;                   // id-range end (IncrementTypeEnum.ID)
    private Integer incrementType;        // IncrementTypeEnum code; null when not incremental
    private String replaceParamType;      // format/type of replaceParam

    public int getJobId() {
        return jobId;
    }

    public void setJobId(int jobId) {
        this.jobId = jobId;
    }

    public String getExecutorHandler() {
        return executorHandler;
    }

    public void setExecutorHandler(String executorHandler) {
        this.executorHandler = executorHandler;
    }

    public String getExecutorParams() {
        return executorParams;
    }

    public void setExecutorParams(String executorParams) {
        this.executorParams = executorParams;
    }

    public String getExecutorBlockStrategy() {
        return executorBlockStrategy;
    }

    public void setExecutorBlockStrategy(String executorBlockStrategy) {
        this.executorBlockStrategy = executorBlockStrategy;
    }

    public int getExecutorTimeout() {
        return executorTimeout;
    }

    public void setExecutorTimeout(int executorTimeout) {
        this.executorTimeout = executorTimeout;
    }

    public long getLogId() {
        return logId;
    }

    public void setLogId(long logId) {
        this.logId = logId;
    }

    public long getLogDateTime() {
        return logDateTime;
    }

    public void setLogDateTime(long logDateTime) {
        this.logDateTime = logDateTime;
    }

    public String getGlueType() {
        return glueType;
    }

    public void setGlueType(String glueType) {
        this.glueType = glueType;
    }

    public String getGlueSource() {
        return glueSource;
    }

    public void setGlueSource(String glueSource) {
        this.glueSource = glueSource;
    }

    public long getGlueUpdatetime() {
        return glueUpdatetime;
    }

    public void setGlueUpdatetime(long glueUpdatetime) {
        this.glueUpdatetime = glueUpdatetime;
    }

    public int getBroadcastIndex() {
        return broadcastIndex;
    }

    public void setBroadcastIndex(int broadcastIndex) {
        this.broadcastIndex = broadcastIndex;
    }

    public int getBroadcastTotal() {
        return broadcastTotal;
    }

    public void setBroadcastTotal(int broadcastTotal) {
        this.broadcastTotal = broadcastTotal;
    }

    public String getJobJson() {
        return jobJson;
    }

    public void setJobJson(String jobJson) {
        this.jobJson = jobJson;
    }

    public String getProcessId() {
        return processId;
    }

    public void setProcessId(String processId) {
        this.processId = processId;
    }

    public String getReplaceParam() {
        return replaceParam;
    }

    public void setReplaceParam(String replaceParam) {
        this.replaceParam = replaceParam;
    }

    public String getJvmParam() {
        return jvmParam;
    }

    public void setJvmParam(String jvmParam) {
        this.jvmParam = jvmParam;
    }

    public Date getStartTime() {
        return startTime;
    }

    public void setStartTime(Date startTime) {
        this.startTime = startTime;
    }

    public Date getTriggerTime() {
        return triggerTime;
    }

    public void setTriggerTime(Date triggerTime) {
        this.triggerTime = triggerTime;
    }

    public String getPartitionInfo() {
        return partitionInfo;
    }

    public void setPartitionInfo(String partitionInfo) {
        this.partitionInfo = partitionInfo;
    }

    public long getStartId() {
        return startId;
    }

    public void setStartId(long startId) {
        this.startId = startId;
    }

    public long getEndId() {
        return endId;
    }

    public void setEndId(long endId) {
        this.endId = endId;
    }

    public Integer getIncrementType() {
        return incrementType;
    }

    public void setIncrementType(Integer incrementType) {
        this.incrementType = incrementType;
    }

    public String getReplaceParamType() {
        return replaceParamType;
    }

    public void setReplaceParamType(String replaceParamType) {
        this.replaceParamType = replaceParamType;
    }

    @Override
    public String toString() {
        return "TriggerParam{" +
                "jobId=" + jobId +
                ", executorHandler='" + executorHandler + '\'' +
                ", executorParams='" + executorParams + '\'' +
                ", executorBlockStrategy='" + executorBlockStrategy + '\'' +
                ", executorTimeout=" + executorTimeout +
                ", logId=" + logId +
                ", logDateTime=" + logDateTime +
                ", glueType='" + glueType + '\'' +
                ", glueSource='" + glueSource + '\'' +
                ", glueUpdatetime=" + glueUpdatetime +
                ", broadcastIndex=" + broadcastIndex +
                ", broadcastTotal=" + broadcastTotal +
                ", jobJson=" + jobJson +
                ", processId=" + processId +
                ", replaceParam=" + replaceParam +
                ", jvmParam=" + jvmParam +
                ", startTime=" + startTime +
                ", triggerTime=" + triggerTime +
                ", partitionInfo=" + partitionInfo +
                ", replaceParamType=" + replaceParamType +
                ", startId=" + startId +
                ", endId=" + endId +
                ", incrementType=" + incrementType +
                '}';
    }
}

View File

@@ -0,0 +1,31 @@
package com.platform.core.enums;
/**
 * Strategy applied when a trigger arrives while the job's thread is already
 * running or has queued triggers.
 */
public enum ExecutorBlockStrategyEnum {
    // declaration order preserved for ordinal/serialization compatibility
    SERIAL_EXECUTION("Serial execution"),
    DISCARD_LATER("Discard Later"),
    COVER_EARLY("Cover Early");

    private String title;

    ExecutorBlockStrategyEnum(String title) {
        this.title = title;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    /**
     * Resolve a constant by its exact name.
     *
     * @param name        candidate constant name; may be null
     * @param defaultItem fallback when no constant matches
     * @return the matching constant, or {@code defaultItem}
     */
    public static ExecutorBlockStrategyEnum match(String name, ExecutorBlockStrategyEnum defaultItem) {
        if (name == null) {
            return defaultItem;
        }
        for (ExecutorBlockStrategyEnum candidate : values()) {
            if (candidate.name().equals(name)) {
                return candidate;
            }
        }
        return defaultItem;
    }
}

View File

@@ -0,0 +1,32 @@
package com.platform.core.enums;
/**
 * How incremental extraction is keyed.
 * Codes: 1 = auto-increment primary key, 2 = time column, 3 = hive partition.
 */
public enum IncrementTypeEnum {
    // declaration order preserved for ordinal/serialization compatibility;
    // descriptions are user-facing strings and kept verbatim
    TIME(2, "时间"),
    ID(1, "自增主键"),
    PARTITION(3, "HIVE分区");

    private final int code;    // numeric code stored/transmitted for this type
    private final String descp; // display description

    IncrementTypeEnum(int code, String descp) {
        this.code = code;
        this.descp = descp;
    }

    public int getCode() {
        return code;
    }

    public String getDescp() {
        return descp;
    }
}

View File

@@ -0,0 +1,10 @@
package com.platform.core.enums;
/**
 * Registry heartbeat constants shared by executor and admin sides.
 */
public class RegistryConfig {
    /** Heartbeat interval/timeout in seconds. */
    public static final int BEAT_TIMEOUT = 30;
    /** A registration is considered dead after missing three heartbeats. */
    public static final int DEAD_TIMEOUT = BEAT_TIMEOUT * 3;

    /** Which side of the platform a registry entry belongs to. */
    public enum RegistType{ EXECUTOR, ADMIN }

    private RegistryConfig() {
        // constants holder - not instantiable
    }
}

View File

@@ -0,0 +1,277 @@
package com.platform.core.executor;
import com.platform.core.biz.AdminBiz;
import com.platform.core.biz.ExecutorBiz;
import com.platform.core.biz.client.AdminBizClient;
import com.platform.core.biz.impl.ExecutorBizImpl;
import com.platform.core.handler.IJobHandler;
import com.platform.core.log.JobFileAppender;
import com.platform.core.thread.*;
import com.platform.rpc.registry.ServiceRegistry;
import com.platform.rpc.remoting.net.impl.netty_http.server.NettyHttpServer;
import com.platform.rpc.remoting.provider.XxlRpcProviderFactory;
import com.platform.rpc.serialize.Serializer;
import com.platform.rpc.serialize.impl.HessianSerializer;
import com.platform.rpc.util.IpUtil;
import com.platform.rpc.util.NetUtil;
import com.platform.core.thread.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
public class JobExecutor {
private static final Logger logger = LoggerFactory.getLogger(JobExecutor.class);
// ---------------------- param ----------------------
private String adminAddresses;
private String appName;
private String ip;
private int port;
private String accessToken;
private String logPath;
private int logRetentionDays;
public void setAdminAddresses(String adminAddresses) {
this.adminAddresses = adminAddresses;
}
public void setAppName(String appName) {
this.appName = appName;
}
public void setIp(String ip) {
this.ip = ip;
}
public void setPort(int port) {
this.port = port;
}
public void setAccessToken(String accessToken) {
this.accessToken = accessToken;
}
public void setLogPath(String logPath) {
this.logPath = logPath;
}
public void setLogRetentionDays(int logRetentionDays) {
this.logRetentionDays = logRetentionDays;
}
// ---------------------- start + stop ----------------------
public void start() throws Exception {
// init logpath
JobFileAppender.initLogPath(logPath);
// init invoker, admin-client
initAdminBizList(adminAddresses, accessToken);
// init JobLogFileCleanThread
JobLogFileCleanThread.getInstance().start(logRetentionDays);
// init TriggerCallbackThread
TriggerCallbackThread.getInstance().start();
// init ProcessCallbackThread
ProcessCallbackThread.getInstance().start();
// init executor-server
port = port > 0 ? port : NetUtil.findAvailablePort(9999);
ip = (ip != null && ip.trim().length() > 0) ? ip : IpUtil.getIp();
initRpcProvider(ip, port, appName, accessToken);
}
public void destroy() {
// destory executor-server
stopRpcProvider();
// destory jobThreadRepository
if (jobThreadRepository.size() > 0) {
for (Map.Entry<Integer, JobThread> item : jobThreadRepository.entrySet()) {
removeJobThread(item.getKey(), "web container destroy and kill the job.");
JobThread oldJobThread = removeJobThread(item.getKey(), "web container destroy and kill the job.");
// wait for job thread push result to callback queue
if (oldJobThread != null) {
try {
oldJobThread.join();
} catch (InterruptedException e) {
logger.error(">>>>>>>>>>> web, JobThread destroy(join) error, jobId:{}", item.getKey(), e);
}
}
}
jobThreadRepository.clear();
}
jobHandlerRepository.clear();
// destory JobLogFileCleanThread
JobLogFileCleanThread.getInstance().toStop();
// destory TriggerCallbackThread
TriggerCallbackThread.getInstance().toStop();
// destory ProcessCallbackThread
ProcessCallbackThread.getInstance().toStop();
}
// ---------------------- admin-client (rpc invoker) ----------------------
private static List<AdminBiz> adminBizList;
private static Serializer serializer = new HessianSerializer();
private void initAdminBizList(String adminAddresses, String accessToken) throws Exception {
if (adminAddresses != null && adminAddresses.trim().length() > 0) {
for (String address : adminAddresses.trim().split(",")) {
if (address != null && address.trim().length() > 0) {
//实例化AdminBizClient
AdminBiz adminBiz = new AdminBizClient(address.trim(), accessToken);
if (adminBizList == null) {
adminBizList = new ArrayList<>();
}
adminBizList.add(adminBiz);
}
}
}
}
public static List<AdminBiz> getAdminBizList() {
return adminBizList;
}
public static Serializer getSerializer() {
return serializer;
}
// ---------------------- executor-server (rpc provider) ----------------------
private XxlRpcProviderFactory xxlRpcProviderFactory = null;
private void initRpcProvider(String ip, int port, String appName, String accessToken) throws Exception {
// init, provider factory
String address = IpUtil.getIpPort(ip, port);
Map<String, String> serviceRegistryParam = new HashMap<>();
serviceRegistryParam.put("appName", appName);
serviceRegistryParam.put("address", address);
xxlRpcProviderFactory = new XxlRpcProviderFactory();
xxlRpcProviderFactory.setServer(NettyHttpServer.class);
xxlRpcProviderFactory.setSerializer(HessianSerializer.class);
xxlRpcProviderFactory.setCorePoolSize(20);
xxlRpcProviderFactory.setMaxPoolSize(200);
xxlRpcProviderFactory.setIp(ip);
xxlRpcProviderFactory.setPort(port);
xxlRpcProviderFactory.setAccessToken(accessToken);
xxlRpcProviderFactory.setServiceRegistry(ExecutorServiceRegistry.class);
xxlRpcProviderFactory.setServiceRegistryParam(serviceRegistryParam);
// add services
xxlRpcProviderFactory.addService(ExecutorBiz.class.getName(), null, new ExecutorBizImpl());
// start
xxlRpcProviderFactory.start();
}
public static class ExecutorServiceRegistry extends ServiceRegistry {
@Override
public void start(Map<String, String> param) {
// start registry
ExecutorRegistryThread.getInstance().start(param.get("appName"), param.get("address"));
}
@Override
public void stop() {
// stop registry
ExecutorRegistryThread.getInstance().toStop();
}
@Override
public boolean registry(Set<String> keys, String value) {
return false;
}
@Override
public boolean remove(Set<String> keys, String value) {
return false;
}
@Override
public Map<String, TreeSet<String>> discovery(Set<String> keys) {
return null;
}
@Override
public TreeSet<String> discovery(String key) {
return null;
}
}
private void stopRpcProvider() {
// stop provider factory
try {
xxlRpcProviderFactory.stop();
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
}
// ---------------------- job handler repository ----------------------
// name -> handler; concurrent map because registration and lookup happen on different threads
private static ConcurrentMap<String, IJobHandler> jobHandlerRepository = new ConcurrentHashMap<String, IJobHandler>();
/**
 * Register a job handler under the given name.
 *
 * @return the handler previously registered under this name, or null
 */
public static IJobHandler registJobHandler(String name, IJobHandler jobHandler) {
logger.info(">>>>>>>>>>> web register jobhandler success, name:{}, jobHandler:{}", name, jobHandler);
return jobHandlerRepository.put(name, jobHandler);
}
/**
 * Look up a registered job handler by name; null when absent.
 */
public static IJobHandler loadJobHandler(String name) {
return jobHandlerRepository.get(name);
}
// ---------------------- job thread repository ----------------------

/** jobId -> running JobThread; concurrent because triggers arrive on RPC worker threads. */
private static ConcurrentMap<Integer, JobThread> jobThreadRepository = new ConcurrentHashMap<>();

/**
 * Create, start and register a JobThread for the given job id. Any thread
 * previously registered under that id is asked to stop and interrupted.
 *
 * @param jobId           id of the scheduled job
 * @param handler         handler the new thread will run
 * @param removeOldReason stop reason handed to a replaced thread
 * @return the freshly started thread
 */
public static JobThread registJobThread(int jobId, IJobHandler handler, String removeOldReason) {
    JobThread thread = new JobThread(jobId, handler);
    thread.start();
    logger.info(">>>>>>>>>>> web regist JobThread success, jobId:{}, handler:{}", jobId, handler);

    // ConcurrentMap.put hands back the previous mapping — shut the replaced thread down.
    JobThread replaced = jobThreadRepository.put(jobId, thread);
    if (replaced != null) {
        replaced.toStop(removeOldReason);
        replaced.interrupt();
    }
    return thread;
}

/**
 * Unregister and stop the JobThread for the given job id.
 *
 * @return the removed thread, or null if none was registered
 */
public static JobThread removeJobThread(int jobId, String removeOldReason) {
    JobThread removed = jobThreadRepository.remove(jobId);
    if (removed == null) {
        return null;
    }
    removed.toStop(removeOldReason);
    removed.interrupt();
    return removed;
}

/**
 * @return the JobThread registered for jobId, or null
 */
public static JobThread loadJobThread(int jobId) {
    return jobThreadRepository.get(jobId);
}
}

View File

@@ -0,0 +1,81 @@
package com.platform.core.executor.impl;
import cn.hutool.core.collection.CollectionUtil;
import com.platform.core.glue.GlueFactory;
import com.platform.core.handler.IJobHandler;
import com.platform.core.handler.annotation.JobHandler;
import com.platform.core.executor.JobExecutor;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.SmartInitializingSingleton;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import java.util.Map;
/**
 * Spring-aware executor: once all singletons are instantiated it scans
 * {@code @JobHandler} beans into the handler repository, switches GlueFactory to
 * its Spring flavour (type 1), then starts the underlying JobExecutor; the
 * executor is torn down with the Spring context via DisposableBean.
 */
public class JobSpringExecutor extends JobExecutor
implements ApplicationContextAware, SmartInitializingSingleton, DisposableBean {
// start
@Override
public void afterSingletonsInstantiated() {
// init JobHandler Repository
initJobHandlerRepository(applicationContext);
// refresh GlueFactory: type 1 selects SpringGlueFactory
GlueFactory.refreshInstance(1);
// super start
try {
super.start();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
// destroy
@Override
public void destroy() {
super.destroy();
}
/**
 * Register every Spring bean annotated with @JobHandler that implements
 * IJobHandler, keyed by the annotation's value(); duplicate names are rejected.
 * NOTE(review): getClass().getAnnotation may miss the annotation on AOP-proxied
 * beans — confirm handlers are not proxied, or look up the target class.
 */
private void initJobHandlerRepository(ApplicationContext applicationContext) {
if (applicationContext == null) {
return;
}
// init job handler action
Map<String, Object> serviceBeanMap = applicationContext.getBeansWithAnnotation(JobHandler.class);
if (CollectionUtil.isNotEmpty(serviceBeanMap)) {
for (Object serviceBean : serviceBeanMap.values()) {
if (serviceBean instanceof IJobHandler) {
String name = serviceBean.getClass().getAnnotation(JobHandler.class).value();
IJobHandler handler = (IJobHandler) serviceBean;
if (loadJobHandler(name) != null) {
throw new RuntimeException("web jobhandler[" + name + "] naming conflicts.");
}
registJobHandler(name, handler);
}
}
}
}
// ---------------------- applicationContext ----------------------
// NOTE(review): static field written from an instance setter — fine with a single
// executor bean per JVM, but a second context would silently overwrite it.
private static ApplicationContext applicationContext;
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
this.applicationContext = applicationContext;
}
public static ApplicationContext getApplicationContext() {
return applicationContext;
}
}

View File

@@ -0,0 +1,89 @@
package com.platform.core.glue;
import com.platform.core.glue.impl.SpringGlueFactory;
import com.platform.core.handler.IJobHandler;
import groovy.lang.GroovyClassLoader;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
 * Factory turning Groovy "glue" source into IJobHandler instances. Parsed
 * classes are cached by an MD5 digest of the source text so repeated triggers
 * of the same glue code do not re-parse it.
 */
public class GlueFactory {

    private static GlueFactory glueFactory = new GlueFactory();

    public static GlueFactory getInstance() {
        return glueFactory;
    }

    /**
     * Replace the shared factory instance.
     *
     * @param type 0 = plain GlueFactory, 1 = Spring-aware SpringGlueFactory
     */
    public static void refreshInstance(int type) {
        if (type == 0) {
            glueFactory = new GlueFactory();
        } else if (type == 1) {
            glueFactory = new SpringGlueFactory();
        }
    }

    /**
     * groovy class loader
     */
    private GroovyClassLoader groovyClassLoader = new GroovyClassLoader();
    // md5(source) -> parsed class; unbounded, keyed per distinct glue source
    private ConcurrentMap<String, Class<?>> CLASS_CACHE = new ConcurrentHashMap<>();

    /**
     * load new instance, prototype
     *
     * @param codeSource Groovy source of an IJobHandler subclass
     * @return a fresh, dependency-injected handler instance
     * @throws Exception if the source is blank, is not an IJobHandler, or cannot be instantiated
     */
    public IJobHandler loadNewInstance(String codeSource) throws Exception {
        if (codeSource != null && codeSource.trim().length() > 0) {
            Class<?> clazz = getCodeSourceClass(codeSource);
            if (clazz != null) {
                Object instance = clazz.newInstance();
                if (instance != null) {
                    if (instance instanceof IJobHandler) {
                        this.injectService(instance);
                        return (IJobHandler) instance;
                    } else {
                        throw new IllegalArgumentException(">>>>>>>>>>> xxl-glue, loadNewInstance error, "
                                + "cannot convert from instance[" + instance.getClass() + "] to IJobHandler");
                    }
                }
            }
        }
        throw new IllegalArgumentException(">>>>>>>>>>> xxl-glue, loadNewInstance error, instance is null");
    }

    /**
     * Parse (or fetch from cache) the Class for the given source text.
     * Falls back to an uncached parse if hashing fails for any reason.
     */
    private Class<?> getCodeSourceClass(String codeSource) {
        try {
            // cache key: md5 of the source bytes; fixed charset so the key does not
            // depend on the platform's default encoding
            byte[] md5 = MessageDigest.getInstance("MD5").digest(codeSource.getBytes("UTF-8"));
            String md5Str = new BigInteger(1, md5).toString(16);

            Class<?> clazz = CLASS_CACHE.get(md5Str);
            if (clazz == null) {
                clazz = groovyClassLoader.parseClass(codeSource);
                // adopt the winner of a concurrent parse so every caller shares
                // one Class object per source (the put's result was ignored before)
                Class<?> existing = CLASS_CACHE.putIfAbsent(md5Str, clazz);
                if (existing != null) {
                    clazz = existing;
                }
            }
            return clazz;
        } catch (Exception e) {
            return groovyClassLoader.parseClass(codeSource);
        }
    }

    /**
     * inject service of bean field — no-op here; SpringGlueFactory overrides this
     * to autowire @Resource/@Autowired fields on the new instance.
     *
     * @param instance freshly created handler instance
     */
    public void injectService(Object instance) {
        // do something
    }
}

View File

@@ -0,0 +1,52 @@
package com.platform.core.glue;
/**
 * Kinds of job "glue" the executor understands. Script kinds carry the
 * interpreter command used to run them plus the script-file suffix; non-script
 * kinds (engine jobs, bean, groovy) carry neither.
 */
public enum GlueTypeEnum {

    seatunnel("seatunnel", false, null, null),
    datax("data", false, null, null),
    flinkx("flinkx", false, null, null),
    BEAN("BEAN", false, null, null),
    GLUE_GROOVY("GLUE(Java)", false, null, null),
    GLUE_SHELL("GLUE(Shell)", true, "bash", ".sh"),
    GLUE_PYTHON("GLUE(Python)", true, "python", ".py"),
    GLUE_PHP("GLUE(PHP)", true, "php", ".php"),
    GLUE_NODEJS("GLUE(Nodejs)", true, "node", ".js"),
    GLUE_POWERSHELL("GLUE(PowerShell)", true, "powershell", ".ps1");

    /** human-readable description */
    private final String desc;
    /** true when the glue is an external script run via a command */
    private final boolean isScript;
    /** interpreter command for script glue; null otherwise */
    private final String cmd;
    /** script file suffix (dot included) for script glue; null otherwise */
    private final String suffix;

    GlueTypeEnum(String desc, boolean isScript, String cmd, String suffix) {
        this.desc = desc;
        this.isScript = isScript;
        this.cmd = cmd;
        this.suffix = suffix;
    }

    public String getDesc() {
        return desc;
    }

    public boolean isScript() {
        return isScript;
    }

    public String getCmd() {
        return cmd;
    }

    public String getSuffix() {
        return suffix;
    }

    /**
     * Resolve a constant by its exact {@code name()}; null when unknown.
     */
    public static GlueTypeEnum match(String name) {
        for (GlueTypeEnum candidate : values()) {
            if (candidate.name().equals(name)) {
                return candidate;
            }
        }
        return null;
    }
}

View File

@@ -0,0 +1,78 @@
package com.platform.core.glue.impl;
import com.platform.core.glue.GlueFactory;
import com.platform.core.executor.impl.JobSpringExecutor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.core.annotation.AnnotationUtils;
import javax.annotation.Resource;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
/**
 * GlueFactory variant that autowires the fields of a freshly created glue
 * handler from the Spring context obtained via JobSpringExecutor. Supports
 * {@code @Resource} (by name, falling back to by type) and {@code @Autowired}
 * (optionally qualified, otherwise by type).
 */
public class SpringGlueFactory extends GlueFactory {
private static Logger logger = LoggerFactory.getLogger(SpringGlueFactory.class);
/**
 * inject action of spring
 * @param instance new glue handler instance; no-op when it or the context is null
 */
@Override
public void injectService(Object instance){
if (instance==null) {
return;
}
if (JobSpringExecutor.getApplicationContext() == null) {
return;
}
Field[] fields = instance.getClass().getDeclaredFields();
for (Field field : fields) {
// static fields are shared state, never injected
if (Modifier.isStatic(field.getModifiers())) {
continue;
}
Object fieldBean = null;
// with bean-id, bean could be found by both @Resource and @Autowired, or bean could only be found by @Autowired
if (AnnotationUtils.getAnnotation(field, Resource.class) != null) {
try {
Resource resource = AnnotationUtils.getAnnotation(field, Resource.class);
if (resource.name()!=null && resource.name().length()>0){
fieldBean = JobSpringExecutor.getApplicationContext().getBean(resource.name());
} else {
fieldBean = JobSpringExecutor.getApplicationContext().getBean(field.getName());
}
} catch (Exception e) {
// by-name lookup failed — deliberately swallowed; falls through to by-type below
}
if (fieldBean==null ) {
fieldBean = JobSpringExecutor.getApplicationContext().getBean(field.getType());
}
} else if (AnnotationUtils.getAnnotation(field, Autowired.class) != null) {
Qualifier qualifier = AnnotationUtils.getAnnotation(field, Qualifier.class);
if (qualifier!=null && qualifier.value()!=null && qualifier.value().length()>0) {
fieldBean = JobSpringExecutor.getApplicationContext().getBean(qualifier.value());
} else {
fieldBean = JobSpringExecutor.getApplicationContext().getBean(field.getType());
}
}
if (fieldBean!=null) {
// reflective write; setAccessible lets us reach private fields
field.setAccessible(true);
try {
field.set(instance, fieldBean);
} catch (IllegalArgumentException e) {
logger.error(e.getMessage(), e);
} catch (IllegalAccessException e) {
logger.error(e.getMessage(), e);
}
}
}
}
}

View File

@@ -0,0 +1,45 @@
package com.platform.core.handler;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
 * Base class for all job handlers. Subclasses implement {@link #execute} and may
 * override {@link #init}/{@link #destroy}, which the executor's JobThread calls
 * around the handler's lifetime.
 */
public abstract class IJobHandler {
/** success */
public static final ReturnT<String> SUCCESS = new ReturnT<>(200, null);
/** fail */
public static final ReturnT<String> FAIL = new ReturnT<>(500, null);
/** fail timeout */
public static final ReturnT<String> FAIL_TIMEOUT = new ReturnT<>(502, null);
// shared scratch map of temp-file paths; presumably keyed by job/log id — TODO confirm against callers
public static final ConcurrentMap<String, String> jobTmpFiles = new ConcurrentHashMap<>();
/**
 * execute handler, invoked when executor receives a scheduling request
 *
 * @param tgParam trigger parameters from the scheduler
 * @return execution result (SUCCESS/FAIL/custom)
 * @throws Exception any failure; the caller converts it into a FAIL result
 */
public abstract ReturnT<String> execute(TriggerParam tgParam) throws Exception;
/**
 * init handler, invoked when JobThread init
 */
public void init() {
// do something
}
/**
 * destroy handler, invoked when JobThread destroy
 */
public void destroy() {
// do something
}
}

View File

@@ -0,0 +1,15 @@
package com.platform.core.handler.annotation;
import java.lang.annotation.*;
/**
* annotation for job handler
*/
@Target({ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
@Inherited
public @interface JobHandler {
/** handler registration name — used as the key in the executor's job handler repository */
String value() default "";
}

View File

@@ -0,0 +1,25 @@
package com.platform.core.handler.impl;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.core.log.JobLogger;
import com.platform.core.handler.IJobHandler;
/**
 * Decorator around a glue-sourced handler that remembers the glue version
 * (its update time) and logs it to the job log before delegating execution.
 */
public class GlueJobHandler extends IJobHandler {

    private final long glueUpdatetime;
    private final IJobHandler jobHandler;

    public GlueJobHandler(IJobHandler jobHandler, long glueUpdatetime) {
        this.glueUpdatetime = glueUpdatetime;
        this.jobHandler = jobHandler;
    }

    /** @return update time of the glue source this handler was built from */
    public long getGlueUpdatetime() {
        return glueUpdatetime;
    }

    @Override
    public ReturnT<String> execute(TriggerParam tgParam) throws Exception {
        // record which glue version handled this trigger, then delegate
        JobLogger.log("----------- glue.version:"+ glueUpdatetime +" -----------");
        return jobHandler.execute(tgParam);
    }
}

View File

@@ -0,0 +1,89 @@
package com.platform.core.handler.impl;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.core.glue.GlueTypeEnum;
import com.platform.core.handler.IJobHandler;
import com.platform.core.log.JobFileAppender;
import com.platform.core.log.JobLogger;
import com.platform.core.util.ScriptUtil;
import com.platform.core.util.ShardingUtil;
import java.io.File;
/**
 * Handler for script glue (shell/python/php/nodejs/powershell): writes the glue
 * source to a per-version script file under the glue source dir and runs it via
 * the glue type's interpreter, redirecting output into the job log file.
 */
public class ScriptJobHandler extends IJobHandler {
private int jobId;
// version (update time) of the glue source this handler was built from
private long glueUpdatetime;
// raw script source text
private String gluesource;
private GlueTypeEnum glueType;
/**
 * Build the handler and delete any stale script files of this job
 * (files named "jobId_*" from older glue versions).
 */
public ScriptJobHandler(int jobId, long glueUpdatetime, String gluesource, GlueTypeEnum glueType){
this.jobId = jobId;
this.glueUpdatetime = glueUpdatetime;
this.gluesource = gluesource;
this.glueType = glueType;
// clean old script file
File glueSrcPath = new File(JobFileAppender.getGlueSrcPath());
if (glueSrcPath.exists()) {
File[] glueSrcFileList = glueSrcPath.listFiles();
if (glueSrcFileList!=null && glueSrcFileList.length>0) {
for (File glueSrcFileItem : glueSrcFileList) {
if (glueSrcFileItem.getName().startsWith(jobId +"_")) {
glueSrcFileItem.delete();
}
}
}
}
}
public long getGlueUpdatetime() {
return glueUpdatetime;
}
/**
 * Materialize the script (if not already on disk for this version), run it with
 * the configured interpreter, and map exit code 0 to SUCCESS, anything else to FAIL.
 */
@Override
public ReturnT<String> execute(TriggerParam tgParam) throws Exception {
if (!glueType.isScript()) {
return new ReturnT<>(IJobHandler.FAIL.getCode(), "glueType[" + glueType + "] invalid.");
}
// cmd
String cmd = glueType.getCmd();
// make script file, named "<jobId>_<glueUpdatetime><suffix>" so each version gets its own file
String scriptFileName = JobFileAppender.getGlueSrcPath()
.concat(File.separator)
.concat(String.valueOf(jobId))
.concat("_")
.concat(String.valueOf(glueUpdatetime))
.concat(glueType.getSuffix());
File scriptFile = new File(scriptFileName);
if (!scriptFile.exists()) {
ScriptUtil.markScriptFile(scriptFileName, gluesource);
}
// log file bound to the current job execution by JobThread
String logFileName = JobFileAppender.contextHolder.get();
// script params: 0 = executor params, 1 = shard index, 2 = shard total
ShardingUtil.ShardingVO shardingVO = ShardingUtil.getShardingVo();
String[] scriptParams = new String[3];
scriptParams[0] = tgParam.getExecutorParams();
scriptParams[1] = String.valueOf(shardingVO.getIndex());
scriptParams[2] = String.valueOf(shardingVO.getTotal());
// invoke
JobLogger.log("----------- script file:"+ scriptFileName +" -----------");
int exitValue = ScriptUtil.execToFile(cmd, scriptFileName, logFileName,tgParam.getLogId(),tgParam.getLogDateTime(), scriptParams);
if (exitValue == 0) {
return IJobHandler.SUCCESS;
} else {
return new ReturnT<>(IJobHandler.FAIL.getCode(), "script exit value(" + exitValue + ") is failed");
}
}
}

View File

@@ -0,0 +1,222 @@
package com.platform.core.log;
import com.platform.core.biz.model.LogResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
 * File-based job log store. One directory per trigger date, one file per log id;
 * glue script sources live in a sibling "gluesource" directory. The per-thread
 * {@link #contextHolder} binds the current job's log file name to the executing
 * thread (inheritable, so handler child threads log to the same file).
 */
public class JobFileAppender {
    private static Logger logger = LoggerFactory.getLogger(JobFileAppender.class);

    // for JobThread (support log for child thread of job handler)
    public static final InheritableThreadLocal<String> contextHolder = new InheritableThreadLocal<>();

    /**
     * log base path
     *
     * strut like:
     * ---/
     * ---/gluesource/
     * ---/gluesource/10_1514171108000.js
     * ---/2017-12-25/
     * ---/2017-12-25/639.log
     * ---/2017-12-25/821.log
     */
    private static String logBasePath = "/data/applogs/executor/jobhandler";
    private static String glueSrcPath = logBasePath.concat("/gluesource");

    /**
     * Override the base log path (null/blank keeps the default) and ensure both
     * the base directory and its "gluesource" child exist.
     *
     * @param logPath configured log directory, may be null or blank
     */
    public static void initLogPath(String logPath) {
        // init
        if (logPath != null && logPath.trim().length() > 0) {
            logBasePath = logPath;
        }
        // mk base dir
        File logPathDir = new File(logBasePath);
        if (!logPathDir.exists()) {
            logPathDir.mkdirs();
        }
        logBasePath = logPathDir.getPath();
        // mk glue dir
        File glueBaseDir = new File(logPathDir, "gluesource");
        if (!glueBaseDir.exists()) {
            glueBaseDir.mkdirs();
        }
        glueSrcPath = glueBaseDir.getPath();
    }

    public static String getLogPath() {
        return logBasePath;
    }

    public static String getGlueSrcPath() {
        return glueSrcPath;
    }

    /**
     * log filename, like "logPath/yyyy-MM-dd/9999.log"
     *
     * @param triggerDate trigger date, selects the per-day directory
     * @param logId       log id, selects the file inside that directory
     * @return absolute log file name (the directory is created if missing)
     */
    public static String makeLogFileName(Date triggerDate, long logId) {
        // filePath/yyyy-MM-dd
        // SimpleDateFormat is not thread-safe — must stay a local, not a static
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        File logFilePath = new File(getLogPath(), sdf.format(triggerDate));
        if (!logFilePath.exists()) {
            // mkdirs (was mkdir): also recreates the base dir if it was removed
            logFilePath.mkdirs();
        }
        // filePath/yyyy-MM-dd/9999.log
        return logFilePath.getPath()
                .concat(File.separator)
                .concat(String.valueOf(logId))
                .concat(".log");
    }

    /**
     * Append one log line (CRLF-terminated, UTF-8) to the given file, creating
     * it if needed. I/O failures are logged and swallowed.
     *
     * @param logFileName target log file; blank/null is ignored
     * @param appendLog   line to append; null is treated as empty
     */
    public static void appendLog(String logFileName, String appendLog) {
        // log file
        if (logFileName == null || logFileName.trim().length() == 0) {
            return;
        }
        File logFile = new File(logFileName);
        if (!logFile.exists()) {
            try {
                logFile.createNewFile();
            } catch (IOException e) {
                logger.error(e.getMessage(), e);
                return;
            }
        }
        // log
        if (appendLog == null) {
            appendLog = "";
        }
        appendLog += "\r\n";
        // try-with-resources: the stream is closed even when write/flush throws
        try (FileOutputStream fos = new FileOutputStream(logFile, true)) {
            fos.write(appendLog.getBytes("utf-8"));
            fos.flush();
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * Read log content from fromLineNum (1-based, inclusive) to end of file.
     *
     * @param logFileName log file to read
     * @param fromLineNum first line number to include
     * @return LogResult with [fromLineNum, toLineNum] and the joined content;
     *         an "end" result with an error message when the file is missing
     */
    public static LogResult readLog(String logFileName, int fromLineNum) {
        // valid log file
        if (logFileName == null || logFileName.trim().length() == 0) {
            return new LogResult(fromLineNum, 0, "readLog fail, logFile not found", true);
        }
        File logFile = new File(logFileName);
        if (!logFile.exists()) {
            return new LogResult(fromLineNum, 0, "readLog fail, logFile not exists", true);
        }
        // read file
        StringBuilder logContentBuffer = new StringBuilder();
        int toLineNum = 0;
        try (LineNumberReader reader =
                new LineNumberReader(new InputStreamReader(new FileInputStream(logFile), "utf-8"))) {
            String line;
            while ((line = reader.readLine()) != null) {
                toLineNum = reader.getLineNumber(); // [from, to], starts at 1
                if (toLineNum >= fromLineNum) {
                    logContentBuffer.append(line).append("\n");
                }
            }
        } catch (IOException e) {
            logger.error(e.getMessage(), e);
        }
        // result
        return new LogResult(fromLineNum, toLineNum, logContentBuffer.toString(), false);
    }

    /**
     * Read a whole log file as one '\n'-joined string.
     *
     * @param logFile file to read
     * @return full content, or null on I/O failure
     */
    public static String readLines(File logFile) {
        try (BufferedReader reader =
                new BufferedReader(new InputStreamReader(new FileInputStream(logFile), "utf-8"))) {
            StringBuilder sb = new StringBuilder();
            String line;
            while ((line = reader.readLine()) != null) {
                sb.append(line).append("\n");
            }
            return sb.toString();
        } catch (IOException e) {
            logger.error(e.getMessage(), e);
        }
        return null;
    }
}

View File

@@ -0,0 +1,77 @@
package com.platform.core.log;
import com.platform.core.util.DateUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.helpers.FormattingTuple;
import org.slf4j.helpers.MessageFormatter;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Date;
/**
 * Job-scoped logging facade: formats messages with a timestamp and caller info
 * and appends them to the job log file bound to the current thread via
 * {@link JobFileAppender#contextHolder}; falls back to slf4j when no job log
 * file is bound.
 */
public class JobLogger {
    private static Logger logger = LoggerFactory.getLogger("web logger");

    /**
     * append log
     *
     * Output shape: "yyyy-MM-dd HH:mm:ss [fileName.MethodName-LineNumber] log".
     *
     * @param call      stack frame identifying the log call site
     * @param appendLog message to append; null treated as empty
     */
    private static void logDetail(StackTraceElement call, String appendLog) {
        // Source tag: file name with only its trailing "java" extension removed (the
        // dot is kept so the output reads "File.Method-Line"). The previous
        // replace("java", "") also mangled "java" occurring anywhere inside the
        // name, and getFileName() can be null for synthetic/native frames.
        String fileName = call.getFileName();
        String sourceTag;
        if (fileName == null) {
            sourceTag = "";
        } else if (fileName.endsWith("java")) {
            sourceTag = fileName.substring(0, fileName.length() - "java".length());
        } else {
            sourceTag = fileName;
        }
        StringBuilder buffer = new StringBuilder();
        buffer.append(DateUtil.formatDateTime(new Date())).append(" ")
                .append("[").append(sourceTag).append(call.getMethodName())
                .append("-").append(call.getLineNumber()).append("]").append(" ")
                .append(appendLog != null ? appendLog : "");
        String formatAppendLog = buffer.toString();

        String logFileName = JobFileAppender.contextHolder.get();
        if (logFileName != null && logFileName.trim().length() > 0) {
            JobFileAppender.appendLog(logFileName, formatAppendLog);
        } else {
            logger.info(">>> {}", formatAppendLog);
        }
    }

    /**
     * append log with pattern
     *
     * @param appendLogPattern   like "aaa {} bbb {} ccc"
     * @param appendLogArguments like "111, true"
     */
    public static void log(String appendLogPattern, Object... appendLogArguments) {
        FormattingTuple ft = MessageFormatter.arrayFormat(appendLogPattern, appendLogArguments);
        String appendLog = ft.getMessage();
        // stack frame [1] is our direct caller
        StackTraceElement callInfo = new Throwable().getStackTrace()[1];
        logDetail(callInfo, appendLog);
    }

    /**
     * append exception stack
     *
     * @param e throwable whose full stack trace is appended to the job log
     */
    public static void log(Throwable e) {
        StringWriter stringWriter = new StringWriter();
        e.printStackTrace(new PrintWriter(stringWriter));
        String appendLog = stringWriter.toString();
        StackTraceElement callInfo = new Throwable().getStackTrace()[1];
        logDetail(callInfo, appendLog);
    }
}

View File

@@ -0,0 +1,121 @@
package com.platform.core.thread;
import com.platform.core.biz.AdminBiz;
import com.platform.core.biz.model.RegistryParam;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.enums.RegistryConfig;
import com.platform.core.executor.JobExecutor;
import com.platform.core.util.OSUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.TimeUnit;
/**
 * Singleton daemon thread that periodically registers this executor (appName +
 * address) with every configured admin, and sends a registry-remove on shutdown.
 */
public class ExecutorRegistryThread {
    private static Logger logger = LoggerFactory.getLogger(ExecutorRegistryThread.class);
    private static ExecutorRegistryThread instance = new ExecutorRegistryThread();

    public static ExecutorRegistryThread getInstance(){
        return instance;
    }

    private Thread registryThread;
    private volatile boolean toStop = false;

    /**
     * Start the heartbeat thread. Does nothing (registryThread stays null) when
     * appName is blank or no admin addresses are configured.
     *
     * @param appName executor app name used as the registry key
     * @param address advertised executor address (ip:port)
     */
    public void start(final String appName, final String address){
        // valid
        if (appName==null || appName.trim().length()==0) {
            logger.warn(">>>>>>>>>>> web, executor registry config fail, appName is null.");
            return;
        }
        if (JobExecutor.getAdminBizList() == null) {
            logger.warn(">>>>>>>>>>> web, executor registry config fail, adminAddresses is null.");
            return;
        }
        registryThread = new Thread(() -> {
            // registry loop: re-register every beat until asked to stop;
            // the first admin that accepts the registration wins (break)
            while (!toStop) {
                try {
                    RegistryParam registryParam = new RegistryParam(RegistryConfig.RegistType.EXECUTOR.name(), appName, address, OSUtils
                            .cpuUsage(),OSUtils.memoryUsage(),OSUtils.loadAverage());
                    for (AdminBiz adminBiz: JobExecutor.getAdminBizList()) {
                        try {
                            ReturnT<String> registryResult = adminBiz.registry(registryParam);
                            if (registryResult!=null && ReturnT.SUCCESS_CODE == registryResult.getCode()) {
                                registryResult = ReturnT.SUCCESS;
                                logger.debug(">>>>>>>>>>> web registry success, registryParam:{}, registryResult:{}", new Object[]{registryParam, registryResult});
                                break;
                            } else {
                                logger.info(">>>>>>>>>>> web registry fail, registryParam:{}, registryResult:{}", new Object[]{registryParam, registryResult});
                            }
                        } catch (Exception e) {
                            logger.info(">>>>>>>>>>> web registry error, registryParam:{}", registryParam, e);
                        }
                    }
                } catch (Exception e) {
                    if (!toStop) {
                        logger.error(e.getMessage(), e);
                    }
                }
                try {
                    if (!toStop) {
                        // NOTE(review): BEAT_TIMEOUT is used as the heartbeat interval here — confirm intended
                        TimeUnit.SECONDS.sleep(RegistryConfig.BEAT_TIMEOUT);
                    }
                } catch (InterruptedException e) {
                    if (!toStop) {
                        logger.warn(">>>>>>>>>>> web, executor registry thread interrupted, error msg:{}", e.getMessage());
                    }
                }
            }
            // registry remove: best-effort de-registration from the first admin that accepts it
            try {
                RegistryParam registryParam = new RegistryParam(RegistryConfig.RegistType.EXECUTOR.name(), appName, address);
                for (AdminBiz adminBiz: JobExecutor.getAdminBizList()) {
                    try {
                        ReturnT<String> registryResult = adminBiz.registryRemove(registryParam);
                        if (registryResult!=null && ReturnT.SUCCESS_CODE == registryResult.getCode()) {
                            registryResult = ReturnT.SUCCESS;
                            logger.info(">>>>>>>>>>> web registry-remove success, registryParam:{}, registryResult:{}", new Object[]{registryParam, registryResult});
                            break;
                        } else {
                            logger.info(">>>>>>>>>>> web registry-remove fail, registryParam:{}, registryResult:{}", new Object[]{registryParam, registryResult});
                        }
                    } catch (Exception e) {
                        if (!toStop) {
                            logger.info(">>>>>>>>>>> web registry-remove error, registryParam:{}", registryParam, e);
                        }
                    }
                }
            } catch (Exception e) {
                if (!toStop) {
                    logger.error(e.getMessage(), e);
                }
            }
            logger.info(">>>>>>>>>>> web, executor registry thread destory.");
        });
        registryThread.setDaemon(true);
        registryThread.setName("web, executor ExecutorRegistryThread");
        registryThread.start();
    }

    /**
     * Signal the heartbeat thread to stop, interrupt its sleep and wait for the
     * registry-remove to complete. Safe to call even if start() never ran (or
     * bailed out on invalid config) — previously this NPE'd on a null thread.
     */
    public void toStop() {
        toStop = true;
        if (registryThread == null) {
            return;
        }
        // interrupt and wait
        registryThread.interrupt();
        try {
            registryThread.join();
        } catch (InterruptedException e) {
            logger.error(e.getMessage(), e);
        }
    }
}

View File

@@ -0,0 +1,112 @@
package com.platform.core.thread;
import com.platform.core.log.JobFileAppender;
import com.platform.core.util.FileUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.concurrent.TimeUnit;
/**
 * Singleton daemon thread that once per day deletes per-date log directories
 * (named "yyyy-MM-dd" under the log base path) older than logRetentionDays.
 */
public class JobLogFileCleanThread {
private static Logger logger = LoggerFactory.getLogger(JobLogFileCleanThread.class);
private static JobLogFileCleanThread instance = new JobLogFileCleanThread();
public static JobLogFileCleanThread getInstance() {
return instance;
}
private Thread localThread;
private volatile boolean toStop = false;
/**
 * Start the daily cleanup thread. Retention below 3 days disables cleanup
 * entirely (localThread stays null).
 *
 * @param logRetentionDays number of days of log directories to keep
 */
public void start(final long logRetentionDays) {
// limit min value
if (logRetentionDays < 3) {
return;
}
localThread = new Thread(() -> {
while (!toStop) {
try {
// clean log dir, over logRetentionDays
File[] childDirs = new File(JobFileAppender.getLogPath()).listFiles();
if (childDirs != null && childDirs.length > 0) {
// today at midnight — the reference point for age calculation
Calendar todayCal = Calendar.getInstance();
todayCal.set(Calendar.HOUR_OF_DAY, 0);
todayCal.set(Calendar.MINUTE, 0);
todayCal.set(Calendar.SECOND, 0);
todayCal.set(Calendar.MILLISECOND, 0);
Date todayDate = todayCal.getTime();
for (File childFile : childDirs) {
// valid: only date-named directories qualify (skips e.g. "gluesource")
if (!childFile.isDirectory() || childFile.getName().indexOf("-") == -1) {
continue;
}
// file create date, parsed from the "yyyy-MM-dd" directory name
Date logFileCreateDate = null;
try {
SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd");
logFileCreateDate = simpleDateFormat.parse(childFile.getName());
} catch (ParseException e) {
logger.error(e.getMessage(), e);
}
// valid
if (logFileCreateDate == null) {
continue;
}
// delete the whole day-directory once it is older than the retention window
if ((todayDate.getTime() - logFileCreateDate.getTime()) >= logRetentionDays * (24 * 60 * 60 * 1000)) {
FileUtil.deleteRecursively(childFile);
}
}
}
} catch (Exception e) {
if (!toStop) {
logger.error(e.getMessage(), e);
}
}
try {
// run once per day; interrupt from toStop() wakes it for shutdown
TimeUnit.DAYS.sleep(1);
} catch (InterruptedException e) {
if (!toStop) {
logger.error(e.getMessage(), e);
}
}
}
logger.info(">>>>>>>>>>> web, executor JobLogFileCleanThread thread destory.");
});
localThread.setDaemon(true);
localThread.setName("web, executor JobLogFileCleanThread");
localThread.start();
}
/**
 * Signal the cleanup thread to stop and wait for it; no-op when it never started.
 */
public void toStop() {
toStop = true;
if (localThread == null) {
return;
}
// interrupt and wait
localThread.interrupt();
try {
localThread.join();
} catch (InterruptedException e) {
logger.error(e.getMessage(), e);
}
}
}

View File

@@ -0,0 +1,214 @@
package com.platform.core.thread;
import com.platform.core.biz.model.HandleCallbackParam;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.core.executor.JobExecutor;
import com.platform.core.handler.IJobHandler;
import com.platform.core.log.JobFileAppender;
import com.platform.core.log.JobLogger;
import com.platform.core.util.ShardingUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.FutureTask;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
/**
 * Worker thread bound to one job id: drains a queue of TriggerParam, runs the
 * handler for each (optionally under a timeout), and pushes the result to
 * TriggerCallbackThread. Self-terminates (via JobExecutor.removeJobThread)
 * after ~30 idle polls, and drains its queue with "killed" callbacks on stop.
 */
public class JobThread extends Thread {
private static Logger logger = LoggerFactory.getLogger(JobThread.class);
private int jobId;
private IJobHandler handler;
// pending triggers for this job; JobThread is the single consumer
private LinkedBlockingQueue<TriggerParam> triggerQueue;
private Set<Long> triggerLogIdSet; // avoid repeat trigger for the same TRIGGER_LOG_ID
// shutdown flag shared between toStop() and the run loop
private volatile boolean toStop = false;
private String stopReason;
private boolean running = false; // if running job
private int idleTimes = 0; // idle times (consecutive empty polls)
public JobThread(int jobId, IJobHandler handler) {
this.jobId = jobId;
this.handler = handler;
this.triggerQueue = new LinkedBlockingQueue<>();
this.triggerLogIdSet = Collections.synchronizedSet(new HashSet<>());
}
public IJobHandler getHandler() {
return handler;
}
/**
 * new trigger to queue
 *
 * @param triggerParam trigger to enqueue
 * @return SUCCESS, or FAIL when the same logId is already queued
 */
public ReturnT<String> pushTriggerQueue(TriggerParam triggerParam) {
// avoid repeat
if (triggerLogIdSet.contains(triggerParam.getLogId())) {
logger.info(">>>>>>>>>>> repeate trigger job, logId:{}", triggerParam.getLogId());
return new ReturnT<>(ReturnT.FAIL_CODE, "repeate trigger job, logId:" + triggerParam.getLogId());
}
triggerLogIdSet.add(triggerParam.getLogId());
triggerQueue.add(triggerParam);
return ReturnT.SUCCESS;
}
/**
 * kill job thread
 *
 * @param stopReason reason reported in the "killed" callbacks
 */
public void toStop(String stopReason) {
/**
 * Thread.interrupt only wakes a thread blocked in wait/join/sleep (the
 * InterruptedException is thrown at the blocking point); it does not kill a
 * running thread by itself. Full shutdown therefore relies on this shared
 * toStop flag, which the run loop checks on every iteration.
 */
this.toStop = true;
this.stopReason = stopReason;
}
/**
 * is running job
 *
 * @return true while a trigger is executing or still queued
 */
public boolean isRunningOrHasQueue() {
return running || triggerQueue.size() > 0;
}
@Override
public void run() {
// init
try {
handler.init();
} catch (Throwable e) {
logger.error(e.getMessage(), e);
}
// execute
while (!toStop) {
running = false;
idleTimes++;
TriggerParam tgParam = null;
ReturnT<String> executeResult = null;
try {
// to check the toStop signal we need to cycle, so we cannot use queue.take(); poll(timeout) instead
tgParam = triggerQueue.poll(3L, TimeUnit.SECONDS);
if (tgParam != null) {
running = true;
idleTimes = 0;
triggerLogIdSet.remove(tgParam.getLogId());
// log filename, like "logPath/yyyy-MM-dd/9999.log"
String logFileName = JobFileAppender.makeLogFileName(new Date(tgParam.getLogDateTime()), tgParam.getLogId());
JobFileAppender.contextHolder.set(logFileName);
ShardingUtil.setShardingVo(new ShardingUtil.ShardingVO(tgParam.getBroadcastIndex(), tgParam.getBroadcastTotal()));
// execute
JobLogger.log("<br>----------- web job execute start -----------<br>----------- Param:" + tgParam.getExecutorParams());
if (tgParam.getExecutorTimeout() > 0) {
// limit timeout: run the handler on a watched child thread
Thread futureThread = null;
try {
final TriggerParam tgParamT = tgParam;
FutureTask<ReturnT<String>> futureTask = new FutureTask<>(() -> handler.execute(tgParamT));
futureThread = new Thread(futureTask);
futureThread.start();
// NOTE(review): timeout unit is MINUTES here — confirm getExecutorTimeout() is minutes, not seconds
executeResult = futureTask.get(tgParam.getExecutorTimeout(), TimeUnit.MINUTES);
} catch (TimeoutException e) {
JobLogger.log("<br>----------- web job execute timeout");
JobLogger.log(e);
executeResult = new ReturnT<>(IJobHandler.FAIL_TIMEOUT.getCode(), "job execute timeout ");
} finally {
// NOTE(review): futureThread could still be null here if Thread creation threw — guard before interrupt
futureThread.interrupt();
}
} else {
// just execute
executeResult = handler.execute(tgParam);
}
if (executeResult == null) {
executeResult = IJobHandler.FAIL;
} else {
// cap the message at 50000 chars and drop the content payload to bound callback size
executeResult.setMsg(
(executeResult != null && executeResult.getMsg() != null && executeResult.getMsg().length() > 50000)
? executeResult.getMsg().substring(0, 50000).concat("...")
: executeResult.getMsg());
executeResult.setContent(null); // limit obj size
}
JobLogger.log("<br>----------- web job execute end(finish) -----------<br>----------- ReturnT:" + executeResult);
} else {
// idle self-termination after ~30 empty 3s polls
if (idleTimes > 30) {
if (triggerQueue.size() == 0) { // avoid concurrent trigger causes jobId-lost
JobExecutor.removeJobThread(jobId, "executor idel times over limit.");
}
}
}
} catch (Throwable e) {
if (toStop) {
JobLogger.log("<br>----------- JobThread toStop, stopReason:" + stopReason);
}
// convert any failure into a FAIL result carrying the full stack trace
StringWriter stringWriter = new StringWriter();
e.printStackTrace(new PrintWriter(stringWriter));
String errorMsg = stringWriter.toString();
executeResult = new ReturnT<>(ReturnT.FAIL_CODE, errorMsg);
JobLogger.log("<br>----------- JobThread Exception:" + errorMsg + "<br>----------- web job execute end(error) -----------");
} finally {
// termination is not status-monitored for now
if (tgParam != null && tgParam.getJobId() != -1) {
// callback handler info
if (!toStop) {
// common case: report the execution result
TriggerCallbackThread.pushCallBack(new HandleCallbackParam(tgParam.getLogId(), tgParam.getLogDateTime(), executeResult));
} else {
// is killed
ReturnT<String> stopResult = new ReturnT<String>(ReturnT.FAIL_CODE, stopReason + " [job running, killed]");
TriggerCallbackThread.pushCallBack(new HandleCallbackParam(tgParam.getLogId(), tgParam.getLogDateTime(), stopResult));
}
}
}
}
// callback trigger request in queue: report never-run triggers as killed
while (triggerQueue != null && triggerQueue.size() > 0) {
TriggerParam triggerParam = triggerQueue.poll();
if (triggerParam != null) {
// is killed
ReturnT<String> stopResult = new ReturnT<String>(ReturnT.FAIL_CODE, stopReason + " [job not executed, in the job queue, killed.]");
TriggerCallbackThread.pushCallBack(new HandleCallbackParam(triggerParam.getLogId(), triggerParam.getLogDateTime(), stopResult));
}
}
// destroy
try {
handler.destroy();
} catch (Throwable e) {
logger.error(e.getMessage(), e);
}
logger.info(">>>>>>>>>>> web JobThread stoped, hashCode:{}", Thread.currentThread());
}
}

View File

@@ -0,0 +1,238 @@
package com.platform.core.thread;

import com.platform.core.biz.AdminBiz;
import com.platform.core.biz.model.HandleProcessCallbackParam;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.enums.RegistryConfig;
import com.platform.core.executor.JobExecutor;
import com.platform.core.log.JobFileAppender;
import com.platform.core.log.JobLogger;
import com.platform.core.util.FileUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

/**
 * Singleton background service that delivers process-handle callback results
 * ({@link HandleProcessCallbackParam}) from this executor to the admin side via
 * {@link AdminBiz#processCallback(List)}.
 *
 * Lifecycle: {@link #start()} spawns two daemon threads — one that drains the
 * in-memory queue and posts batches to the admin, and one that periodically
 * replays batches persisted to disk after all admin endpoints failed.
 * {@link #toStop()} requests cooperative shutdown and joins both threads.
 */
public class ProcessCallbackThread {
    private static Logger logger = LoggerFactory.getLogger(ProcessCallbackThread.class);

    // Eagerly-initialized singleton; all mutable state below is per-process.
    private static ProcessCallbackThread instance = new ProcessCallbackThread();

    public static ProcessCallbackThread getInstance() {
        return instance;
    }

    /**
     * job results callback queue (unbounded; producers never block)
     */
    private LinkedBlockingQueue<HandleProcessCallbackParam> callBackQueue = new LinkedBlockingQueue<>();

    /** Enqueue one callback result; it is consumed asynchronously by the callback thread. */
    public static void pushCallBack(HandleProcessCallbackParam callback) {
        getInstance().callBackQueue.add(callback);
        logger.debug(">>>>>>>>>>> web, push process callback request, logId:{}", callback.getLogId());
    }

    /**
     * callback thread
     */
    private Thread processCallbackThread;       // drains callBackQueue and posts batches to admin
    private Thread processRetryCallbackThread;  // periodically replays persisted fail-callback files
    private volatile boolean toStop = false;    // cooperative shutdown flag read by both threads

    public void start() {
        // valid: without admin addresses there is nowhere to deliver callbacks
        if (JobExecutor.getAdminBizList() == null) {
            logger.warn(">>>>>>>>>>> web, executor callback config fail, adminAddresses is null.");
            return;
        }
        // callback delivery thread
        processCallbackThread = new Thread(() -> {
            // normal callback loop: block for one item, then batch everything else queued
            while (!toStop) {
                try {
                    HandleProcessCallbackParam callback = getInstance().callBackQueue.take();
                    // callback list param
                    List<HandleProcessCallbackParam> callbackParamList = new ArrayList<HandleProcessCallbackParam>();
                    int drainToNum = getInstance().callBackQueue.drainTo(callbackParamList);  // NOTE(review): return value unused
                    callbackParamList.add(callback);
                    // callback, will retry (spill to disk) if error
                    if (callbackParamList.size() > 0) {
                        doCallback(callbackParamList);
                    }
                } catch (Exception e) {
                    // take() throws InterruptedException on shutdown; only log if not stopping
                    if (!toStop) {
                        logger.error(e.getMessage(), e);
                    }
                }
            }
            // last callback: flush whatever is still queued after stop was requested
            try {
                List<HandleProcessCallbackParam> callbackParamList = new ArrayList<HandleProcessCallbackParam>();
                int drainToNum = getInstance().callBackQueue.drainTo(callbackParamList);  // NOTE(review): return value unused
                if (callbackParamList != null && callbackParamList.size() > 0) {
                    doCallback(callbackParamList);
                }
            } catch (Exception e) {
                if (!toStop) {
                    logger.error(e.getMessage(), e);
                }
            }
            logger.info(">>>>>>>>>>> web, executor callback thread destory.");
        });
        processCallbackThread.setDaemon(true);
        // NOTE(review): name says "TriggerCallbackThread" — copy-paste from the sibling class;
        // left as-is here since changing it alters a runtime string.
        processCallbackThread.setName("web, executor TriggerCallbackThread");
        processCallbackThread.start();
        // retry thread: replays fail-callback files every BEAT_TIMEOUT seconds
        processRetryCallbackThread = new Thread(() -> {
            while (!toStop) {
                try {
                    retryFailCallbackFile();
                } catch (Exception e) {
                    if (!toStop) {
                        logger.error(e.getMessage(), e);
                    }
                }
                try {
                    TimeUnit.SECONDS.sleep(RegistryConfig.BEAT_TIMEOUT);
                } catch (InterruptedException e) {
                    if (!toStop) {
                        logger.error(e.getMessage(), e);
                    }
                }
            }
            logger.info(">>>>>>>>>>> web, executor retry callback thread destory.");
        });
        processRetryCallbackThread.setDaemon(true);
        // NOTE(review): no setName() here — this thread keeps the default "Thread-N" name
        processRetryCallbackThread.start();
    }

    /** Request shutdown: set the flag, interrupt both threads, and wait for them to finish. */
    public void toStop() {
        toStop = true;
        // stop callback, interrupt and wait
        if (processCallbackThread != null) { // support empty admin address (start() may have returned early)
            processCallbackThread.interrupt();
            try {
                processCallbackThread.join();
            } catch (InterruptedException e) {
                logger.error(e.getMessage(), e);
            }
        }
        // stop retry, interrupt and wait
        if (processRetryCallbackThread != null) {
            processRetryCallbackThread.interrupt();
            try {
                processRetryCallbackThread.join();
            } catch (InterruptedException e) {
                logger.error(e.getMessage(), e);
            }
        }
    }

    /**
     * do callback: try each admin endpoint in order until one succeeds; if all
     * fail, persist the batch to disk so the retry thread can replay it later.
     *
     * @param callbackParamList batch of callback results to deliver
     */
    private void doCallback(List<HandleProcessCallbackParam> callbackParamList) {
        boolean callbackRet = false;
        // callback, will retry if error
        for (AdminBiz adminBiz : JobExecutor.getAdminBizList()) {
            try {
                ReturnT<String> callbackResult = adminBiz.processCallback(callbackParamList);
                if (callbackResult != null && ReturnT.SUCCESS_CODE == callbackResult.getCode()) {
                    callbackLog(callbackParamList, "<br>----------- web job callback finish.");
                    callbackRet = true;
                    break;  // delivered — no need to try remaining endpoints
                } else {
                    callbackLog(callbackParamList, "<br>----------- web job callback fail, callbackResult:" + callbackResult);
                }
            } catch (Exception e) {
                callbackLog(callbackParamList, "<br>----------- web job callback error, errorMsg:" + e.getMessage());
            }
        }
        if (!callbackRet) {
            appendFailCallbackFile(callbackParamList);
        }
    }

    /**
     * callback log: append a line to each job's own log file by pointing the
     * file-appender context at that job's log file before writing.
     */
    private void callbackLog(List<HandleProcessCallbackParam> callbackParamList, String logContent) {
        for (HandleProcessCallbackParam callbackParam : callbackParamList) {
            String logFileName = JobFileAppender.makeLogFileName(new Date(callbackParam.getLogDateTime()), callbackParam.getLogId());
            JobFileAppender.contextHolder.set(logFileName);
            JobLogger.log(logContent);
        }
    }

    // ---------------------- fail-callback file ----------------------
    // Failed batches are serialized into one file each under <logPath>/processcallbacklog/.
    private static String failCallbackFilePath = JobFileAppender.getLogPath().concat(File.separator).concat("processcallbacklog").concat(File.separator);
    private static String failCallbackFileName = failCallbackFilePath.concat("web-processcallback-{x}").concat(".log");

    /** Serialize a failed batch to a new timestamp-named file for later replay. */
    private void appendFailCallbackFile(List<HandleProcessCallbackParam> handleProcessCallbackParams) {
        // valid
        if (handleProcessCallbackParams == null || handleProcessCallbackParams.size() == 0) {
            return;
        }
        // append file
        byte[] callbackParamList_bytes = JobExecutor.getSerializer().serialize(handleProcessCallbackParams);
        File callbackLogFile = new File(failCallbackFileName.replace("{x}", String.valueOf(System.currentTimeMillis())));
        if (callbackLogFile.exists()) {
            // timestamp collision: probe up to 100 "-i" suffixed names for a free one
            for (int i = 0; i < 100; i++) {
                callbackLogFile = new File(failCallbackFileName.replace("{x}", String.valueOf(System.currentTimeMillis()).concat("-").concat(String.valueOf(i))));
                if (!callbackLogFile.exists()) {
                    break;
                }
            }
        }
        FileUtil.writeFileContent(callbackLogFile, callbackParamList_bytes);
    }

    /**
     * Replay every persisted fail-callback file: deserialize, delete the file,
     * then attempt delivery. If delivery fails again, doCallback() re-appends
     * the batch to a fresh file, so the data is not lost.
     */
    private void retryFailCallbackFile() {
        // valid
        File callbackLogPath = new File(failCallbackFilePath);
        if (!callbackLogPath.exists()) {
            return;
        }
        if (callbackLogPath.isFile()) {
            // a plain file at the directory path is unexpected — remove it
            callbackLogPath.delete();
        }
        if (!(callbackLogPath.isDirectory() && callbackLogPath.list() != null && callbackLogPath.list().length > 0)) {
            return;
        }
        // load and clear file, retry
        List<HandleProcessCallbackParam> params;
        for (File f : callbackLogPath.listFiles()) {
            byte[] ps = FileUtil.readFileContent(f);
            // NOTE(review): unchecked cast — serializer contract assumed to round-trip the list type
            params = (List<HandleProcessCallbackParam>) JobExecutor.getSerializer().deserialize(ps, HandleProcessCallbackParam.class);
            f.delete();
            doCallback(params);
        }
    }
}

View File

@@ -0,0 +1,239 @@
package com.platform.core.thread;

import com.platform.core.biz.AdminBiz;
import com.platform.core.biz.model.HandleCallbackParam;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.enums.RegistryConfig;
import com.platform.core.executor.JobExecutor;
import com.platform.core.log.JobFileAppender;
import com.platform.core.log.JobLogger;
import com.platform.core.util.FileUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

/**
 * Singleton background service that delivers job-handle callback results
 * ({@link HandleCallbackParam}) from this executor to the admin side via
 * {@link AdminBiz#callback(List)}.
 *
 * Lifecycle mirrors ProcessCallbackThread: {@link #start()} spawns a delivery
 * daemon and a disk-replay daemon; {@link #toStop()} shuts both down and joins.
 */
public class TriggerCallbackThread {
    private static Logger logger = LoggerFactory.getLogger(TriggerCallbackThread.class);

    // Eagerly-initialized singleton; all mutable state below is per-process.
    private static TriggerCallbackThread instance = new TriggerCallbackThread();

    public static TriggerCallbackThread getInstance() {
        return instance;
    }

    /**
     * job results callback queue (unbounded; producers never block)
     */
    private LinkedBlockingQueue<HandleCallbackParam> callBackQueue = new LinkedBlockingQueue<>();

    /** Enqueue one callback result; it is consumed asynchronously by the callback thread. */
    public static void pushCallBack(HandleCallbackParam callback) {
        getInstance().callBackQueue.add(callback);
        logger.debug(">>>>>>>>>>> web, push callback request, logId:{}", callback.getLogId());
    }

    /**
     * callback thread
     */
    private Thread triggerCallbackThread;       // drains callBackQueue and posts batches to admin
    private Thread triggerRetryCallbackThread;  // periodically replays persisted fail-callback files
    private volatile boolean toStop = false;    // cooperative shutdown flag read by both threads

    public void start() {
        // valid: without admin addresses there is nowhere to deliver callbacks
        if (JobExecutor.getAdminBizList() == null) {
            logger.warn(">>>>>>>>>>> web, executor callback config fail, adminAddresses is null.");
            return;
        }
        // callback delivery thread
        triggerCallbackThread = new Thread(() -> {
            // normal callback loop: block for one item, then batch everything else queued
            while (!toStop) {
                try {
                    HandleCallbackParam callback = getInstance().callBackQueue.take();
                    // callback list param
                    List<HandleCallbackParam> callbackParamList = new ArrayList<HandleCallbackParam>();
                    int drainToNum = getInstance().callBackQueue.drainTo(callbackParamList);  // NOTE(review): return value unused
                    callbackParamList.add(callback);
                    // callback, will retry (spill to disk) if error
                    if (callbackParamList.size() > 0) {
                        doCallback(callbackParamList);
                    }
                } catch (Exception e) {
                    // take() throws InterruptedException on shutdown; only log if not stopping
                    if (!toStop) {
                        logger.error(e.getMessage(), e);
                    }
                }
            }
            // last callback: flush whatever is still queued after stop was requested
            try {
                List<HandleCallbackParam> callbackParamList = new ArrayList<HandleCallbackParam>();
                int drainToNum = getInstance().callBackQueue.drainTo(callbackParamList);  // NOTE(review): return value unused
                if (callbackParamList != null && callbackParamList.size() > 0) {
                    doCallback(callbackParamList);
                }
            } catch (Exception e) {
                if (!toStop) {
                    logger.error(e.getMessage(), e);
                }
            }
            logger.info(">>>>>>>>>>> web, executor callback thread destory.");
        });
        triggerCallbackThread.setDaemon(true);
        triggerCallbackThread.setName("web, executor TriggerCallbackThread");
        triggerCallbackThread.start();
        // retry thread: replays fail-callback files every BEAT_TIMEOUT seconds
        triggerRetryCallbackThread = new Thread(() -> {
            while (!toStop) {
                try {
                    retryFailCallbackFile();
                } catch (Exception e) {
                    if (!toStop) {
                        logger.error(e.getMessage(), e);
                    }
                }
                try {
                    TimeUnit.SECONDS.sleep(RegistryConfig.BEAT_TIMEOUT);
                } catch (InterruptedException e) {
                    if (!toStop) {
                        logger.error(e.getMessage(), e);
                    }
                }
            }
            logger.info(">>>>>>>>>>> web, executor retry callback thread destory.");
        });
        triggerRetryCallbackThread.setDaemon(true);
        // NOTE(review): no setName() here — this thread keeps the default "Thread-N" name
        triggerRetryCallbackThread.start();
    }

    /** Request shutdown: set the flag, interrupt both threads, and wait for them to finish. */
    public void toStop() {
        toStop = true;
        // stop callback, interrupt and wait
        if (triggerCallbackThread != null) { // support empty admin address (start() may have returned early)
            triggerCallbackThread.interrupt();
            try {
                triggerCallbackThread.join();
            } catch (InterruptedException e) {
                logger.error(e.getMessage(), e);
            }
        }
        // stop retry, interrupt and wait
        if (triggerRetryCallbackThread != null) {
            triggerRetryCallbackThread.interrupt();
            try {
                triggerRetryCallbackThread.join();
            } catch (InterruptedException e) {
                logger.error(e.getMessage(), e);
            }
        }
    }

    /**
     * do callback: try each admin endpoint in order until one succeeds; if all
     * fail, persist the batch to disk so the retry thread can replay it later.
     *
     * @param callbackParamList batch of callback results to deliver
     */
    private void doCallback(List<HandleCallbackParam> callbackParamList) {
        boolean callbackRet = false;
        // callback, will retry if error
        for (AdminBiz adminBiz : JobExecutor.getAdminBizList()) {
            try {
                ReturnT<String> callbackResult = adminBiz.callback(callbackParamList);
                if (callbackResult != null && ReturnT.SUCCESS_CODE == callbackResult.getCode()) {
                    callbackLog(callbackParamList, "<br>----------- web job callback finish.");
                    callbackRet = true;
                    break;  // delivered — no need to try remaining endpoints
                } else {
                    callbackLog(callbackParamList, "<br>----------- web job callback fail, callbackResult:" + callbackResult);
                }
            } catch (Exception e) {
                callbackLog(callbackParamList, "<br>----------- web job callback error, errorMsg:" + e.getMessage());
            }
        }
        if (!callbackRet) {
            appendFailCallbackFile(callbackParamList);
        }
    }

    /**
     * callback log: append a line to each job's own log file by pointing the
     * file-appender context at that job's log file before writing.
     */
    private void callbackLog(List<HandleCallbackParam> callbackParamList, String logContent) {
        for (HandleCallbackParam c : callbackParamList) {
            // NOTE(review): getLogDateTim() (missing 'e') is the accessor's actual name
            // on HandleCallbackParam — presumably an upstream typo; verify before renaming.
            String logFileName = JobFileAppender.makeLogFileName(new Date(c.getLogDateTim()), c.getLogId());
            JobFileAppender.contextHolder.set(logFileName);
            JobLogger.log(logContent);
        }
    }

    // ---------------------- fail-callback file ----------------------
    // Failed batches are serialized into one file each under <logPath>/callbacklog/.
    private static String failCallbackFilePath = JobFileAppender.getLogPath().concat(File.separator).concat("callbacklog").concat(File.separator);
    private static String failCallbackFileName = failCallbackFilePath.concat("web-callback-{x}").concat(".log");

    /** Serialize a failed batch to a new timestamp-named file for later replay. */
    private void appendFailCallbackFile(List<HandleCallbackParam> callbackParamList) {
        // valid
        if (callbackParamList == null || callbackParamList.size() == 0) {
            return;
        }
        // append file
        byte[] callbackParamList_bytes = JobExecutor.getSerializer().serialize(callbackParamList);
        File callbackLogFile = new File(failCallbackFileName.replace("{x}", String.valueOf(System.currentTimeMillis())));
        if (callbackLogFile.exists()) {
            // timestamp collision: probe up to 100 "-i" suffixed names for a free one
            for (int i = 0; i < 100; i++) {
                callbackLogFile = new File(failCallbackFileName.replace("{x}", String.valueOf(System.currentTimeMillis()).concat("-").concat(String.valueOf(i))));
                if (!callbackLogFile.exists()) {
                    break;
                }
            }
        }
        FileUtil.writeFileContent(callbackLogFile, callbackParamList_bytes);
    }

    /**
     * Replay every persisted fail-callback file: deserialize, delete the file,
     * then attempt delivery. If delivery fails again, doCallback() re-appends
     * the batch to a fresh file, so the data is not lost.
     */
    private void retryFailCallbackFile() {
        // valid
        File callbackLogPath = new File(failCallbackFilePath);
        if (!callbackLogPath.exists()) {
            return;
        }
        if (callbackLogPath.isFile()) {
            // a plain file at the directory path is unexpected — remove it
            callbackLogPath.delete();
        }
        if (!(callbackLogPath.isDirectory() && callbackLogPath.list() != null && callbackLogPath.list().length > 0)) {
            return;
        }
        // load and clear file, retry
        List<HandleCallbackParam> params;
        for (File f : callbackLogPath.listFiles()) {
            byte[] ps = FileUtil.readFileContent(f);
            // NOTE(review): unchecked cast — serializer contract assumed to round-trip the list type
            params = (List<HandleCallbackParam>) JobExecutor.getSerializer().deserialize(ps, HandleCallbackParam.class);
            f.delete();
            doCallback(params);
        }
    }
}

View File

@@ -0,0 +1,65 @@
package com.platform.core.util;
public final class Constants {
public static final String MYSQL_DATABASE = "Unknown database";
public static final String MYSQL_CONNEXP = "Communications link failure";
public static final String MYSQL_ACCDENIED = "Access denied";
public static final String MYSQL_TABLE_NAME_ERR1 = "Table";
public static final String MYSQL_TABLE_NAME_ERR2 = "doesn't exist";
public static final String MYSQL_SELECT_PRI = "SELECT command denied to user";
public static final String MYSQL_COLUMN1 = "Unknown column";
public static final String MYSQL_COLUMN2 = "field list";
public static final String MYSQL_WHERE = "where clause";
public static final String ORACLE_DATABASE = "ORA-12505";
public static final String ORACLE_CONNEXP = "The Network Adapter could not establish the connection";
public static final String ORACLE_ACCDENIED = "ORA-01017";
public static final String ORACLE_TABLE_NAME = "table or view does not exist";
public static final String ORACLE_SELECT_PRI = "insufficient privileges";
public static final String ORACLE_SQL = "invalid identifier";
public static final String CMDWINDOW ="cmd /c python";
public static final String CMDLINUX ="python";
public static final String CMDWINDOWTASKKILL ="taskkill /pid ";
public static final String CMDLINUXTASKKILL ="kill -9 ";
public static final String SPLIT_COMMA = ",";
public static final String SPLIT_AT = "@";
public static final String SPLIT_COLON = ";";
public static final String SPLIT_POINT = ".";
public static final String SPLIT_SCOLON=":";
public static final String SPLIT_HYPHEN = "-";
public static final String SPLIT_DIVIDE = "/";
public static final String SPLIT_STAR = "*";
public static final String SPLIT_QUESTION = "?";
public static final String EQUAL = "=";
public static final String SPLIT_AMPERSAND = "&";
public static final String AND = "AND";
public static final String SPACE = " ";
public static final String STRING_BLANK = "";
public static final String MONGO_URL_PREFIX = "mongodb://";
/**
* UTF-8 字符集
*/
public static final String UTF8CODE = "UTF-8";
/**
* GBK 字符集
*/
public static final String GBKCODE = "GBK";
/**
* http请求
*/
public static final String HTTPCODE = "http://";
/**
* https请求
*/
public static final String HTTPSCODE = "https://";
}

View File

@@ -0,0 +1,143 @@
package com.platform.core.util;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

/**
 * Date formatting/parsing helpers backed by a per-thread cache of
 * {@link SimpleDateFormat} instances (SimpleDateFormat is not thread-safe),
 * plus simple calendar-arithmetic helpers.
 */
public class DateUtil {
    // ---------------------- format parse ----------------------
    private static Logger logger = LoggerFactory.getLogger(DateUtil.class);

    private static final String DATE_FORMAT = "yyyy-MM-dd";
    private static final String DATETIME_FORMAT = "yyyy-MM-dd HH:mm:ss";

    // One pattern->formatter map per thread; entries are created lazily.
    private static final ThreadLocal<Map<String, DateFormat>> dateFormatThreadLocal = new ThreadLocal<Map<String, DateFormat>>();

    /**
     * Return this thread's cached formatter for the pattern, creating it on first use.
     *
     * @param pattern a non-empty SimpleDateFormat pattern
     * @throws IllegalArgumentException if the pattern is null or blank
     */
    private static DateFormat getDateFormat(String pattern) {
        if (pattern == null || pattern.trim().length() == 0) {
            throw new IllegalArgumentException("pattern cannot be empty.");
        }
        // The map is thread-confined, so no synchronization is needed. (The
        // original synchronized block guarded state no other thread can see,
        // adding contention on a shared monitor for no benefit.)
        Map<String, DateFormat> dateFormatMap = dateFormatThreadLocal.get();
        if (dateFormatMap == null) {
            dateFormatMap = new HashMap<>();
            dateFormatThreadLocal.set(dateFormatMap);
        }
        DateFormat dateFormat = dateFormatMap.get(pattern);
        if (dateFormat == null) {
            dateFormat = new SimpleDateFormat(pattern);
            dateFormatMap.put(pattern, dateFormat);
        }
        return dateFormat;
    }

    /**
     * format date, like "yyyy-MM-dd"
     *
     * @param date the date to format
     * @return formatted date string
     */
    public static String formatDate(Date date) {
        return format(date, DATE_FORMAT);
    }

    /**
     * format datetime, like "yyyy-MM-dd HH:mm:ss"
     *
     * @param date the date to format
     * @return formatted datetime string
     */
    public static String formatDateTime(Date date) {
        return format(date, DATETIME_FORMAT);
    }

    /**
     * format date with an arbitrary pattern
     *
     * @param date   the date to format
     * @param patten SimpleDateFormat pattern
     * @return formatted string
     */
    public static String format(Date date, String patten) {
        return getDateFormat(patten).format(date);
    }

    /**
     * parse date string, like "yyyy-MM-dd"
     * (fixed javadoc: previously claimed a datetime pattern)
     *
     * @param dateString the string to parse
     * @return parsed Date, or null on parse failure
     */
    public static Date parseDate(String dateString) {
        return parse(dateString, DATE_FORMAT);
    }

    /**
     * parse datetime string, like "yyyy-MM-dd HH:mm:ss"
     *
     * @param dateString the string to parse
     * @return parsed Date, or null on parse failure
     */
    public static Date parseDateTime(String dateString) {
        return parse(dateString, DATETIME_FORMAT);
    }

    /**
     * parse date with an arbitrary pattern
     *
     * @param dateString the string to parse
     * @param pattern    SimpleDateFormat pattern
     * @return parsed Date, or null on parse failure (failure is logged, not thrown)
     */
    public static Date parse(String dateString, String pattern) {
        try {
            return getDateFormat(pattern).parse(dateString);
        } catch (Exception e) {
            logger.warn("parse date error, dateString = {}, pattern={}; errorMsg = {}", dateString, pattern, e.getMessage());
            return null;
        }
    }

    // ---------------------- add date ----------------------

    /** Add (or subtract, if negative) whole years. */
    public static Date addYears(final Date date, final int amount) {
        return add(date, Calendar.YEAR, amount);
    }

    /** Add (or subtract, if negative) whole months. */
    public static Date addMonths(final Date date, final int amount) {
        return add(date, Calendar.MONTH, amount);
    }

    /** Add (or subtract, if negative) whole days. */
    public static Date addDays(final Date date, final int amount) {
        return add(date, Calendar.DAY_OF_MONTH, amount);
    }

    /**
     * Calendar-field arithmetic shared by the addXxx helpers.
     *
     * @return a new Date, or null if the input date is null
     */
    private static Date add(final Date date, final int calendarField, final int amount) {
        if (date == null) {
            return null;
        }
        final Calendar c = Calendar.getInstance();
        c.setTime(date);
        c.add(calendarField, amount);
        return c.getTime();
    }
}

View File

@@ -0,0 +1,174 @@
package com.platform.core.util;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;

/**
 * File helpers: recursive delete, single-file delete, and whole-file byte
 * read/write. I/O errors are logged and swallowed (best-effort semantics),
 * matching how callers such as the callback threads use these methods.
 */
public class FileUtil {
    private static Logger logger = LoggerFactory.getLogger(FileUtil.class);

    /**
     * delete recursively
     *
     * @param root file or directory to delete
     * @return true if the final delete of {@code root} succeeded; false if
     *         {@code root} is null or does not exist
     */
    public static boolean deleteRecursively(File root) {
        if (root != null && root.exists()) {
            if (root.isDirectory()) {
                File[] children = root.listFiles();
                if (children != null) {
                    for (File child : children) {
                        deleteRecursively(child);
                    }
                }
            }
            return root.delete();
        }
        return false;
    }

    /** Delete a single file if it exists (no-op otherwise). */
    public static void deleteFile(String fileName) {
        // file
        File file = new File(fileName);
        if (file.exists()) {
            file.delete();
        }
    }

    /**
     * Overwrite {@code file} with {@code data}, creating parent directories as
     * needed. Errors are logged, not thrown.
     */
    public static void writeFileContent(File file, byte[] data) {
        // ensure parent directories exist; guard against a null parent
        // (original called file.getParentFile().mkdirs() unconditionally,
        // which NPEs for a bare relative file name)
        if (!file.exists()) {
            File parent = file.getParentFile();
            if (parent != null) {
                parent.mkdirs();
            }
        }
        // write file content
        FileOutputStream fos = null;
        try {
            fos = new FileOutputStream(file);
            fos.write(data);
            fos.flush();
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        } finally {
            if (fos != null) {
                try {
                    fos.close();
                } catch (IOException e) {
                    logger.error(e.getMessage(), e);
                }
            }
        }
    }

    /**
     * Read the whole file into a byte array sized by {@link File#length()}.
     * Errors are logged, not thrown; on error the returned array may be
     * partially filled (zero-padded), preserving the original best-effort contract.
     */
    public static byte[] readFileContent(File file) {
        byte[] filecontent = new byte[(int) file.length()];
        FileInputStream in = null;
        try {
            in = new FileInputStream(file);
            // Fixed: a single read() may return fewer bytes than requested;
            // loop until the buffer is full or EOF is reached.
            int offset = 0;
            while (offset < filecontent.length) {
                int read = in.read(filecontent, offset, filecontent.length - offset);
                if (read < 0) {
                    break; // unexpected EOF (file shrank since length() was read)
                }
                offset += read;
            }
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (IOException e) {
                    logger.error(e.getMessage(), e);
                }
            }
        }
        return filecontent;
    }
}

View File

@@ -0,0 +1,119 @@
package com.platform.core.util;

import com.platform.core.biz.model.ReturnT;
import com.platform.rpc.util.json.BasicJson;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Map;

/**
 * Minimal HTTP helper: POST a JSON-serialized object to an admin endpoint and
 * parse the JSON response into a {@link ReturnT}.
 */
public class JobRemotingUtil {
    private static Logger logger = LoggerFactory.getLogger(JobRemotingUtil.class);

    /** Header name carrying the shared access token, checked by the admin side. */
    public static String XXL_RPC_ACCESS_TOKEN = "XXL-RPC-ACCESS-TOKEN";

    /**
     * POST {@code requestObj} as JSON to {@code url}.
     *
     * @param url         target endpoint
     * @param accessToken optional shared token; sent as a header when non-blank
     * @param requestObj  object serialized to the JSON request body
     * @param timeout     read timeout in seconds (connect timeout is fixed at 3s)
     * @return parsed ReturnT; a FAIL ReturnT on any transport or parse error
     */
    public static ReturnT<String> postBody(String url, String accessToken, Object requestObj, int timeout) {
        HttpURLConnection connection = null;
        BufferedReader bufferedReader = null;
        try {
            // connection
            URL realUrl = new URL(url);
            connection = (HttpURLConnection) realUrl.openConnection();
            // connection setting
            connection.setRequestMethod("POST");
            connection.setDoOutput(true);
            connection.setDoInput(true);
            connection.setUseCaches(false);
            connection.setReadTimeout(timeout * 1000);
            connection.setConnectTimeout(3 * 1000);
            connection.setRequestProperty("connection", "Keep-Alive");
            connection.setRequestProperty("Content-Type", "application/json;charset=UTF-8");
            connection.setRequestProperty("Accept-Charset", "application/json;charset=UTF-8");
            if (accessToken != null && accessToken.trim().length() > 0) {
                connection.setRequestProperty(XXL_RPC_ACCESS_TOKEN, accessToken);
            }
            // do connection
            connection.connect();
            // write requestBody.
            // Fixed: writeBytes(String) discards the high byte of every char,
            // corrupting any non-ASCII JSON; encode explicitly as UTF-8 instead,
            // matching the declared Content-Type charset.
            String requestBody = BasicJson.toJson(requestObj);
            DataOutputStream dataOutputStream = new DataOutputStream(connection.getOutputStream());
            dataOutputStream.write(requestBody.getBytes("UTF-8"));
            dataOutputStream.flush();
            dataOutputStream.close();
            // valid StatusCode
            int statusCode = connection.getResponseCode();
            if (statusCode != 200) {
                return new ReturnT<String>(ReturnT.FAIL_CODE, "xxl-rpc remoting fail, StatusCode(" + statusCode + ") invalid. for url : " + url);
            }
            // read result.
            // Fixed: decode the response as UTF-8 explicitly instead of the
            // platform default charset.
            bufferedReader = new BufferedReader(new InputStreamReader(connection.getInputStream(), "UTF-8"));
            StringBuilder result = new StringBuilder();
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                result.append(line);
            }
            String resultJson = result.toString();
            // parse returnT from the JSON map {code, msg, content}
            try {
                Map<String, Object> resultMap = BasicJson.parseMap(resultJson);
                ReturnT<String> returnT = new ReturnT<String>();
                if (resultMap == null) {
                    returnT.setCode(ReturnT.FAIL_CODE);
                    returnT.setMsg("AdminBizClient Remoting call fail.");
                } else {
                    returnT.setCode(Integer.valueOf(String.valueOf(resultMap.get("code"))));
                    returnT.setMsg(String.valueOf(resultMap.get("msg")));
                    returnT.setContent(String.valueOf(resultMap.get("content")));
                }
                return returnT;
            } catch (Exception e) {
                logger.error("xxl-rpc remoting (url=" + url + ") response content invalid(" + resultJson + ").", e);
                return new ReturnT<String>(ReturnT.FAIL_CODE, "xxl-rpc remoting (url=" + url + ") response content invalid(" + resultJson + ").");
            }
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            return new ReturnT<String>(ReturnT.FAIL_CODE, "xxl-rpc remoting error(" + e.getMessage() + "), for url : " + url);
        } finally {
            // best-effort cleanup of reader and connection
            try {
                if (bufferedReader != null) {
                    bufferedReader.close();
                }
                if (connection != null) {
                    connection.disconnect();
                }
            } catch (Exception e2) {
                logger.error(e2.getMessage(), e2);
            }
        }
    }
}

View File

@@ -0,0 +1,11 @@
package com.platform.core.util;

import com.sun.jna.Library;
import com.sun.jna.Native;

/**
 * JNA binding to the Windows kernel32 library, used by ProcessUtil to resolve
 * an OS pid from a JDK-internal process handle.
 */
public interface Kernel32 extends Library {
    // Native.load replaces Native.loadLibrary, which is deprecated in JNA 5.x
    // (the project pins jna 5.8.0); it is also generic, so no cast is needed.
    Kernel32 INSTANCE = Native.load("kernel32", Kernel32.class);

    /**
     * Windows GetProcessId: returns the pid for the given process handle value.
     * NOTE(review): the handle is passed as a boxed Long extracted reflectively
     * from java.lang.ProcessImpl's "handle" field — verify the mapping if the
     * JDK or JNA version changes.
     */
    long GetProcessId(Long hProcess);
}

View File

@@ -0,0 +1,128 @@
package com.platform.core.util;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import oshi.hardware.CentralProcessor;
import oshi.hardware.GlobalMemory;
import oshi.hardware.HardwareAbstractionLayer;

import java.math.RoundingMode;
import java.text.DecimalFormat;

/**
 * os utils: memory/CPU/load metrics read via OSHI, each rounded to two decimals.
 */
public class OSUtils {
    private static final Logger logger = LoggerFactory.getLogger(OSUtils.class);
    private static final SystemInfo SI = new SystemInfo();
    public static final String TWO_DECIMAL = "0.00";
    private static HardwareAbstractionLayer hal = SI.getHardware();

    private OSUtils() {
        // utility class; no instances
    }

    /**
     * Round to two decimals, HALF_UP. Extracted from five identical copies of
     * the DecimalFormat boilerplate. A new DecimalFormat is created per call on
     * purpose: DecimalFormat is not thread-safe, and these methods may be
     * called from multiple threads.
     */
    private static double round2(double value) {
        DecimalFormat df = new DecimalFormat(TWO_DECIMAL);
        df.setRoundingMode(RoundingMode.HALF_UP);
        return Double.parseDouble(df.format(value));
    }

    /**
     * get memory usage, keep 2 decimal
     *
     * @return used fraction of physical memory, as a percent
     */
    public static double memoryUsage() {
        GlobalMemory memory = hal.getMemory();
        double memoryUsage = (memory.getTotal() - memory.getAvailable()) * 1.0 / memory.getTotal();
        return round2(memoryUsage * 100);
    }

    /**
     * get available physical memory size, keep 2 decimal
     * (includes swap-in-use, per the original formula)
     *
     * @return available physical memory size, unit: G
     */
    public static double availablePhysicalMemorySize() {
        GlobalMemory memory = hal.getMemory();
        double availablePhysicalMemorySize = (memory.getAvailable() + memory.getSwapUsed()) / 1024.0 / 1024 / 1024;
        return round2(availablePhysicalMemorySize);
    }

    /**
     * get total physical memory size, keep 2 decimal
     * (fixed javadoc: previously described the *available* size)
     *
     * @return total physical memory size, unit: G
     */
    public static double totalMemorySize() {
        GlobalMemory memory = hal.getMemory();
        double totalPhysicalMemorySize = memory.getTotal() / 1024.0 / 1024 / 1024;
        return round2(totalPhysicalMemorySize);
    }

    /**
     * load average
     *
     * @return system load average (may be negative if unavailable on this OS)
     */
    public static double loadAverage() {
        double loadAverage = hal.getProcessor().getSystemLoadAverage();
        return round2(loadAverage);
    }

    /**
     * get cpu usage
     *
     * @return cpu usage, as a percent
     */
    public static double cpuUsage() {
        CentralProcessor processor = hal.getProcessor();
        double cpuUsage = processor.getSystemCpuLoad();
        return round2(cpuUsage * 100);
    }

    /**
     * check memory and cpu usage against the given thresholds
     *
     * @param systemCpuLoad        maximum acceptable load average
     * @param systemReservedMemory minimum free memory to keep, unit: G
     * @return true if both resources are within limits
     */
    public static Boolean checkResource(double systemCpuLoad, double systemReservedMemory) {
        // judging usage
        double loadAverage = OSUtils.loadAverage();
        double availablePhysicalMemorySize = OSUtils.availablePhysicalMemorySize();
        if (loadAverage > systemCpuLoad || availablePhysicalMemorySize < systemReservedMemory) {
            logger.warn("load or availablePhysicalMemorySize(G) is too high, it's availablePhysicalMemorySize(G):{},loadAvg:{}", availablePhysicalMemorySize, loadAverage);
            return false;
        } else {
            return true;
        }
    }
}

View File

@@ -0,0 +1,93 @@
package com.platform.core.util;

import com.platform.core.thread.JobThread;
import com.platform.core.log.JobLogger;
import com.sun.jna.Platform;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.reflect.Field;
import java.nio.charset.StandardCharsets;

/**
 * ProcessUtil: resolve the OS pid behind a {@link Process} and force-kill a
 * process by pid, on Windows and Linux/AIX.
 */
public class ProcessUtil {
    // Fixed: logger was created with JobThread.class (copy-paste), so this
    // class's log lines appeared under the wrong logger name.
    private static Logger logger = LoggerFactory.getLogger(ProcessUtil.class);

    /**
     * Best-effort pid lookup via reflection on JDK-internal fields (Java 8 era):
     * "handle" on Windows (resolved through kernel32 GetProcessId) and "pid" on
     * java.lang.UNIXProcess.
     * NOTE(review): java.lang.UNIXProcess no longer exists on Java 9+, where
     * Process.pid() should be used instead — confirm the target runtime.
     *
     * @param process the running process
     * @return the pid as a string, or "-1" if it could not be determined
     */
    public static String getProcessId(Process process) {
        long pid = -1;
        Field field;
        if (Platform.isWindows()) {
            try {
                field = process.getClass().getDeclaredField("handle");
                field.setAccessible(true);
                pid = Kernel32.INSTANCE.GetProcessId((Long) field.get(process));
            } catch (Exception ex) {
                // Fixed: "{0}" is a MessageFormat placeholder, not SLF4J's "{}";
                // pass the throwable as the last argument so the stack trace is kept.
                logger.error("get process id for windows error", ex);
            }
        } else if (Platform.isLinux() || Platform.isAIX()) {
            try {
                Class<?> clazz = Class.forName("java.lang.UNIXProcess");
                field = clazz.getDeclaredField("pid");
                field.setAccessible(true);
                pid = (Integer) field.get(process);
            } catch (Throwable e) {
                logger.error("get process id for unix error", e);
            }
        }
        return String.valueOf(pid);
    }

    /**
     * 关闭进程 — force-kill the process with the given pid
     * (Windows: taskkill /F /T kills the whole tree; Linux/AIX: kill).
     * The kill command's console output is streamed into the job log.
     *
     * @param pid the target pid; must be non-empty and not "-1"
     * @return true if the kill command ran without error
     * @throws RuntimeException if pid is empty or "-1"
     */
    public static boolean killProcessByPid(String pid) {
        if (StringUtils.isEmpty(pid) || "-1".equals(pid)) {
            throw new RuntimeException("Pid ==" + pid);
        }
        Process process = null;
        BufferedReader reader = null;
        String command = "";
        boolean result;
        if (Platform.isWindows()) {
            command = "cmd.exe /c taskkill /PID " + pid + " /F /T ";
        } else if (Platform.isLinux() || Platform.isAIX()) {
            command = "kill " + pid;
        }
        // NOTE(review): on any other platform command stays "" and exec("")
        // throws, which is caught below and reported as failure.
        try {
            // run the kill command and forward its output to the job log
            process = Runtime.getRuntime().exec(command);
            reader = new BufferedReader(new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8));
            String line;
            while ((line = reader.readLine()) != null) {
                JobLogger.log(line);
            }
            result = true;
        } catch (Exception e) {
            logger.error("kill pid error", e);
            result = false;
        } finally {
            if (process != null) {
                process.destroy();
            }
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException e) {
                    logger.error("reader close error", e);
                }
            }
        }
        return result;
    }
}

View File

@@ -0,0 +1,229 @@
package com.platform.core.util;
import com.platform.core.biz.model.HandleProcessCallbackParam;
import com.platform.core.log.JobLogger;
import com.platform.core.thread.ProcessCallbackThread;

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
/**
* 1、内嵌编译器如"PythonInterpreter"无法引用扩展包因此推荐使用java调用控制台进程方式"Runtime.getRuntime().exec()"来运行脚本(shell或python)
* 2、因为通过java调用控制台进程方式实现需要保证目标机器PATH路径正确配置对应编译器
* 3、暂时脚本执行日志只能在脚本执行结束后一次性获取无法保证实时性因此为确保日志实时性可改为将脚本打印的日志存储在指定的日志文件上
* 4、python 异常输出优先级高于标准输出体现在Log文件中因此推荐通过logging方式打日志保持和异常信息一致否则用prinf日志顺序会错乱
* <p>
*/
/**
 * Script (shell / python) execution helper.
 *
 * <p>1. Embedded interpreters (e.g. "PythonInterpreter") cannot reference extension packages, so
 * scripts are run through a console process via "Runtime.getRuntime().exec()".
 * 2. Because of that, the matching interpreter must be on the target machine's PATH.
 * 3. Script output is streamed into a dedicated log file so logs stay (near) real-time.
 * 4. For python, stderr takes priority over stdout in the log file; prefer the logging module over
 * print so that message order stays consistent with exception output.
 */
public class ScriptUtil {
    /**
     * Writes the script {@code content} to {@code scriptFileName}
     * (e.g. filePath/gluesource/666-123456789.py) encoded as UTF-8.
     *
     * @param scriptFileName target file path
     * @param content        script source to write
     * @throws IOException if the file cannot be created or written
     */
    public static void markScriptFile(String scriptFileName, String content) throws IOException {
        // fixed: try-with-resources closes the stream exactly once
        // (the original closed it in the try body AND again in finally)
        try (FileOutputStream fileOutputStream = new FileOutputStream(scriptFileName)) {
            fileOutputStream.write(content.getBytes(StandardCharsets.UTF_8));
        }
    }

    /**
     * Executes a script and pumps its stdout/stderr into {@code logFile} in (near) real-time.
     *
     * @param command     interpreter to run (e.g. "python", "bash")
     * @param scriptFile  script path passed to the interpreter
     * @param logFile     file that receives both stdout and stderr (appended)
     * @param logId       job log id reported back together with the native process id
     * @param logDateTime job log timestamp reported back together with the native process id
     * @param params      extra arguments appended to the command line
     * @return the process exit code (0 = success), or -1 when execution failed
     */
    public static int execToFile(String command, String scriptFile, String logFile,long logId,long logDateTime, String... params) {
        FileOutputStream fileOutputStream = null;
        Thread inputThread = null;
        Thread errThread = null;
        try {
            // append to the shared job log file
            fileOutputStream = new FileOutputStream(logFile, true);
            // command line: interpreter + script + optional params
            List<String> cmdarray = new ArrayList<>();
            cmdarray.add(command);
            cmdarray.add(scriptFile);
            if (params != null && params.length > 0) {
                for (String param : params) {
                    cmdarray.add(param);
                }
            }
            String[] cmdarrayFinal = cmdarray.toArray(new String[cmdarray.size()]);
            // process-exec
            final Process process = Runtime.getRuntime().exec(cmdarrayFinal);
            String prcsId = ProcessUtil.getProcessId(process);
            JobLogger.log("------------------Process id: " + prcsId);
            // report the native process id back so the task can later be killed by pid
            HandleProcessCallbackParam prcs = new HandleProcessCallbackParam(logId, logDateTime, prcsId);
            ProcessCallbackThread.pushCallBack(prcs);
            // pump stdout and stderr into the log file on separate threads
            final FileOutputStream finalFileOutputStream = fileOutputStream;
            inputThread = new Thread(() -> {
                try {
                    copy(process.getInputStream(), finalFileOutputStream, new byte[1024]);
                } catch (IOException e) {
                    JobLogger.log(e);
                }
            });
            errThread = new Thread(() -> {
                try {
                    copy(process.getErrorStream(), finalFileOutputStream, new byte[1024]);
                } catch (IOException e) {
                    JobLogger.log(e);
                }
            });
            inputThread.start();
            errThread.start();
            // wait for the process, then let the log pumps drain
            int exitValue = process.waitFor(); // exit code: 0=success, 1=error
            inputThread.join();
            errThread.join();
            return exitValue;
        } catch (Exception e) {
            JobLogger.log(e);
            return -1;
        } finally {
            if (fileOutputStream != null) {
                try {
                    fileOutputStream.close();
                } catch (IOException e) {
                    JobLogger.log(e);
                }
            }
            // if we failed before join(), stop the pump threads
            if (inputThread != null && inputThread.isAlive()) {
                inputThread.interrupt();
            }
            if (errThread != null && errThread.isAlive()) {
                errThread.interrupt();
            }
        }
    }

    /**
     * Copies everything from {@code inputStream} into {@code outputStream}.
     * The input stream is always closed; the output stream is flushed but left open.
     *
     * @param inputStream  source; closed before returning
     * @param outputStream sink; may be null to discard the data
     * @param buffer       scratch buffer to copy through
     * @return number of bytes copied
     * @throws IOException on read/write failure
     */
    private static long copy(InputStream inputStream, OutputStream outputStream, byte[] buffer) throws IOException {
        // fixed: removed the confusing null-reassignment/double-close dance of the original
        // and guarded flush() with the same null check used when writing
        try {
            long total = 0;
            int read;
            while ((read = inputStream.read(buffer)) != -1) {
                if (read > 0) {
                    total += read;
                    if (outputStream != null) {
                        outputStream.write(buffer, 0, read);
                    }
                }
            }
            if (outputStream != null) {
                outputStream.flush();
            }
            return total;
        } finally {
            inputStream.close();
        }
    }
}

View File

@@ -0,0 +1,43 @@
package com.platform.core.util;
/**
 * Holds the sharding context (shard index / total shard count) for the current
 * thread; the context is inherited by child threads.
 */
public class ShardingUtil {

    // InheritableThreadLocal so threads spawned by the current one see the same context
    private static final InheritableThreadLocal<ShardingVO> SHARDING_CONTEXT =
            new InheritableThreadLocal<>();

    /**
     * Sharding parameters: the index assigned to this shard and the total number of shards.
     */
    public static class ShardingVO {

        // index of this shard
        private int index;
        // total number of shards
        private int total;

        public ShardingVO(int index, int total) {
            this.index = index;
            this.total = total;
        }

        public int getIndex() {
            return index;
        }

        public void setIndex(int index) {
            this.index = index;
        }

        public int getTotal() {
            return total;
        }

        public void setTotal(int total) {
            this.total = total;
        }
    }

    /** Binds the given sharding context to the current thread. */
    public static void setShardingVo(ShardingVO shardingVo) {
        SHARDING_CONTEXT.set(shardingVo);
    }

    /** Returns the sharding context of the current thread, or {@code null} if none was set. */
    public static ShardingVO getShardingVo() {
        return SHARDING_CONTEXT.get();
    }
}

View File

@@ -0,0 +1,853 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>service-data-dts-parent</artifactId>
<groupId>com.platform</groupId>
<version>0.4.x</version>
</parent>
<artifactId>service-data-dts</artifactId>
<packaging>jar</packaging>
<properties>
<java.version>1.8</java.version>
<spring-cloud.version>Hoxton.SR9</spring-cloud.version>
<druid.version>1.2.8</druid.version>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>${spring-boot.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<!--Mysql依赖包-->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
</dependency>
<!-- druid数据源驱动 -->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid-spring-boot-starter</artifactId>
<version>${druid.version}</version>
</dependency>
<!--eureka 客户端-->
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-netflix-eureka-client</artifactId>
<exclusions>
<exclusion>
<artifactId>spring-cloud-starter-netflix-archaius</artifactId>
<groupId>org.springframework.cloud</groupId>
</exclusion>
</exclusions>
</dependency>
<!--SpringBoot集成Web模块-->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
<exclusions>
<exclusion>
<artifactId>logback-classic</artifactId>
<groupId>ch.qos.logback</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-config</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-security</artifactId>
</dependency>
<dependency>
<groupId>com.platform</groupId>
<artifactId>common-service-api</artifactId>
<version>0.4.x</version>
</dependency>
<!-- Mybatis Plus -->
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
<version>${mybatisplus.version}</version>
<exclusions>
<exclusion>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-generator</artifactId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus</artifactId>
<version>${mybatisplus.version}</version>
</dependency>
<!-- 接口管理 -->
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger2</artifactId>
<version>${swagger.version}</version>
<exclusions>
<exclusion>
<groupId>io.swagger</groupId>
<artifactId>swagger-models</artifactId>
</exclusion>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>io.swagger</groupId>
<artifactId>swagger-models</artifactId>
<version>${swagger-models.version}</version>
<exclusions>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>swagger-annotations</artifactId>
<groupId>io.swagger</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.github.xiaoymin</groupId>
<artifactId>swagger-bootstrap-ui</artifactId>
<version>${swagger-bootstrap-ui.version}</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>${fastjson.version}</version>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>${postgresql.version}</version>
</dependency>
<dependency>
<groupId>com.oracle</groupId>
<artifactId>ojdbc6</artifactId>
<version>11.2.0.3</version>
<scope>system</scope>
<systemPath>${basedir}/src/main/lib/ojdbc6-11.2.0.3.jar</systemPath>
</dependency>
<!-- https://mvnrepository.com/artifact/net.sourceforge.jtds/jtds -->
<dependency>
<groupId>net.sourceforge.jtds</groupId>
<artifactId>jtds</artifactId>
<version>1.3.1</version>
</dependency>
<dependency>
<groupId>com.sap.cloud.db.jdbc</groupId>
<artifactId>ngdbc</artifactId>
<version>2.3.48</version>
</dependency>
<dependency>
<groupId>com.microsoft.sqlserver</groupId>
<artifactId>sqljdbc4</artifactId>
<version>4.0</version>
<scope>system</scope>
<systemPath>${basedir}/src/main/lib/sqljdbc4-4.0.jar</systemPath>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j-api.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>${logback-classic.version}</version>
<exclusions>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
</exclusions>
</dependency>
<!-- NOTE(review): duplicate declaration - mysql-connector-java is already listed near the top of
     this <dependencies> block (there without an explicit version). Maven resolves the first
     declaration it encounters, so these should be consolidated into a single entry pinned to
     ${mysql-connector.version}. -->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>${mysql-connector.version}</version>
</dependency>
<dependency>
<groupId>com.platform</groupId>
<artifactId>service-data-core</artifactId>
<version>${project.parent.version}</version>
<exclusions>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
</dependency>
<!-- mail-starter -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-mail</artifactId>
</dependency>
<dependency>
<groupId>io.jsonwebtoken</groupId>
<artifactId>jjwt</artifactId>
<version>${jjwt.version}</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>30.0-jre</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>com.sun.jersey</artifactId>
<groupId>jersey-json</groupId>
</exclusion>
<exclusion>
<artifactId>jsr305</artifactId>
<groupId>com.google.code.findbugs</groupId>
</exclusion>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
<exclusion>
<artifactId>jettison</artifactId>
<groupId>org.codehaus.jettison</groupId>
</exclusion>
<exclusion>
<artifactId>jackson-core-asl</artifactId>
<groupId>org.codehaus.jackson</groupId>
</exclusion>
<exclusion>
<artifactId>jackson-mapper-asl</artifactId>
<groupId>org.codehaus.jackson</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>commons-cli</artifactId>
<groupId>commons-cli</groupId>
</exclusion>
<exclusion>
<artifactId>commons-logging</artifactId>
<groupId>commons-logging</groupId>
</exclusion>
<exclusion>
<artifactId>commons-collections</artifactId>
<groupId>commons-collections</groupId>
</exclusion>
<exclusion>
<artifactId>commons-lang</artifactId>
<groupId>commons-lang</groupId>
</exclusion>
<exclusion>
<artifactId>curator-framework</artifactId>
<groupId>org.apache.curator</groupId>
</exclusion>
<exclusion>
<artifactId>log4j</artifactId>
<groupId>log4j</groupId>
</exclusion>
<exclusion>
<artifactId>netty</artifactId>
<groupId>io.netty</groupId>
</exclusion>
<exclusion>
<artifactId>servlet-api</artifactId>
<groupId>javax.servlet</groupId>
</exclusion>
<exclusion>
<artifactId>jsp-api</artifactId>
<groupId>javax.servlet.jsp</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-util</artifactId>
<groupId>org.mortbay.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty</artifactId>
<groupId>org.mortbay.jetty</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>${hive.jdbc.version}</version>
<exclusions>
<exclusion>
<artifactId>jsr305</artifactId>
<groupId>com.google.code.findbugs</groupId>
</exclusion>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
<exclusion>
<artifactId>jettison</artifactId>
<groupId>org.codehaus.jettison</groupId>
</exclusion>
<exclusion>
<artifactId>commons-cli</artifactId>
<groupId>commons-cli</groupId>
</exclusion>
<exclusion>
<artifactId>curator-client</artifactId>
<groupId>org.apache.curator</groupId>
</exclusion>
<exclusion>
<artifactId>commons-compress</artifactId>
<groupId>org.apache.commons</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-common</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-hdfs</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>snappy</artifactId>
<groupId>org.iq80.snappy</groupId>
</exclusion>
<exclusion>
<artifactId>antlr-runtime</artifactId>
<groupId>org.antlr</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-client</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>libthrift</artifactId>
<groupId>org.apache.thrift</groupId>
</exclusion>
<exclusion>
<artifactId>twill-common</artifactId>
<groupId>org.apache.twill</groupId>
</exclusion>
<exclusion>
<artifactId>twill-core</artifactId>
<groupId>org.apache.twill</groupId>
</exclusion>
<exclusion>
<artifactId>twill-discovery-api</artifactId>
<groupId>org.apache.twill</groupId>
</exclusion>
<exclusion>
<artifactId>twill-discovery-core</artifactId>
<groupId>org.apache.twill</groupId>
</exclusion>
<exclusion>
<artifactId>twill-zookeeper</artifactId>
<groupId>org.apache.twill</groupId>
</exclusion>
<exclusion>
<artifactId>avro</artifactId>
<groupId>org.apache.avro</groupId>
</exclusion>
<exclusion>
<artifactId>curator-recipes</artifactId>
<groupId>org.apache.curator</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-common</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-hadoop-compat</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-hadoop2-compat</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-server</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>curator-framework</artifactId>
<groupId>org.apache.curator</groupId>
</exclusion>
<exclusion>
<artifactId>guice-servlet</artifactId>
<groupId>com.google.inject.extensions</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-client</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-yarn-api</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-yarn-common</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>jackson-core-asl</artifactId>
<groupId>org.codehaus.jackson</groupId>
</exclusion>
<exclusion>
<artifactId>jackson-mapper-asl</artifactId>
<groupId>org.codehaus.jackson</groupId>
</exclusion>
<exclusion>
<artifactId>jackson-jaxrs</artifactId>
<groupId>org.codehaus.jackson</groupId>
</exclusion>
<exclusion>
<artifactId>jackson-xc</artifactId>
<groupId>org.codehaus.jackson</groupId>
</exclusion>
<exclusion>
<artifactId>jersey-client</artifactId>
<groupId>com.sun.jersey</groupId>
</exclusion>
<exclusion>
<artifactId>jamon-runtime</artifactId>
<groupId>org.jamon</groupId>
</exclusion>
<exclusion>
<artifactId>servlet-api</artifactId>
<groupId>javax.servlet</groupId>
</exclusion>
<exclusion>
<artifactId>commons-logging</artifactId>
<groupId>commons-logging</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-annotations</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>commons-collections</artifactId>
<groupId>commons-collections</groupId>
</exclusion>
<exclusion>
<artifactId>jersey-guice</artifactId>
<groupId>com.sun.jersey.contribs</groupId>
</exclusion>
<exclusion>
<artifactId>log4j-slf4j-impl</artifactId>
<groupId>org.apache.logging.log4j</groupId>
</exclusion>
<exclusion>
<artifactId>hive-shims-common</artifactId>
<groupId>org.apache.hive.shims</groupId>
</exclusion>
<exclusion>
<artifactId>javax.servlet</artifactId>
<groupId>org.eclipse.jetty.orbit</groupId>
</exclusion>
<exclusion>
<artifactId>jsp-api</artifactId>
<groupId>javax.servlet.jsp</groupId>
</exclusion>
<exclusion>
<artifactId>jasper-compiler</artifactId>
<groupId>tomcat</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-all</artifactId>
<groupId>org.eclipse.jetty.aggregate</groupId>
</exclusion>
<exclusion>
<artifactId>jetty</artifactId>
<groupId>org.mortbay.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-util</artifactId>
<groupId>org.mortbay.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
<exclusion>
<artifactId>commons-cli</artifactId>
<groupId>commons-cli</groupId>
</exclusion>
<exclusion>
<artifactId>jackson-core-asl</artifactId>
<groupId>org.codehaus.jackson</groupId>
</exclusion>
<exclusion>
<artifactId>jackson-mapper-asl</artifactId>
<groupId>org.codehaus.jackson</groupId>
</exclusion>
<exclusion>
<artifactId>servlet-api</artifactId>
<groupId>javax.servlet</groupId>
</exclusion>
<exclusion>
<artifactId>jetty</artifactId>
<groupId>org.mortbay.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-util</artifactId>
<groupId>org.mortbay.jetty</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
<version>${hbase.version}</version>
<exclusions>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
<exclusion>
<artifactId>commons-logging</artifactId>
<groupId>commons-logging</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-auth</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-common</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-annotations</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-protocol</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>jackson-mapper-asl</artifactId>
<groupId>org.codehaus.jackson</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-util</artifactId>
<groupId>org.mortbay.jetty</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-protocol</artifactId>
<version>1.3.5</version>
</dependency>
<dependency>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix-core</artifactId>
<version>${phoenix.version}</version>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
<exclusion>
<artifactId>commons-cli</artifactId>
<groupId>commons-cli</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-common</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>avro</artifactId>
<groupId>org.apache.avro</groupId>
</exclusion>
<exclusion>
<artifactId>guice</artifactId>
<groupId>com.google.inject</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-yarn-api</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-auth</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>jersey-core</artifactId>
<groupId>com.sun.jersey</groupId>
</exclusion>
<exclusion>
<artifactId>guice-servlet</artifactId>
<groupId>com.google.inject.extensions</groupId>
</exclusion>
<exclusion>
<artifactId>jersey-server</artifactId>
<groupId>com.sun.jersey</groupId>
</exclusion>
<exclusion>
<artifactId>jersey-json</artifactId>
<groupId>com.sun.jersey</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>netty</artifactId>
<groupId>io.netty</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-client</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>commons-io</artifactId>
<groupId>commons-io</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>zookeeper</artifactId>
<groupId>org.apache.zookeeper</groupId>
</exclusion>
<exclusion>
<artifactId>commons-math3</artifactId>
<groupId>org.apache.commons</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-annotations</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-hdfs</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-yarn-client</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-yarn-common</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-yarn-server-common</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>javax.ws.rs-api</artifactId>
<groupId>javax.ws.rs</groupId>
</exclusion>
<exclusion>
<artifactId>htrace-core</artifactId>
<groupId>org.apache.htrace</groupId>
</exclusion>
<exclusion>
<artifactId>jline</artifactId>
<groupId>jline</groupId>
</exclusion>
<exclusion>
<artifactId>fastutil</artifactId>
<groupId>it.unimi.dsi</groupId>
</exclusion>
<exclusion>
<artifactId>commons-lang</artifactId>
<groupId>commons-lang</groupId>
</exclusion>
<exclusion>
<artifactId>jsr305</artifactId>
<groupId>com.google.code.findbugs</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-common</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>javax.servlet.jsp-api</artifactId>
<groupId>javax.servlet.jsp</groupId>
</exclusion>
<exclusion>
<artifactId>hbase-server</artifactId>
<groupId>org.apache.hbase</groupId>
</exclusion>
<exclusion>
<artifactId>javax.servlet-api</artifactId>
<groupId>javax.servlet</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-io</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-http</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-security</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-server</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-servlet</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-webapp</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongo-java-driver</artifactId>
<version>${mongo-java-driver.version}</version>
</dependency>
<dependency>
<groupId>ru.yandex.clickhouse</groupId>
<artifactId>clickhouse-jdbc</artifactId>
<version>0.2.4</version>
<exclusions>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<build>
<finalName>service-data-dts</finalName>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
<!-- 跳过单元测试 -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<skipTests>true</skipTests>
</configuration>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>1.4.0</version>
<executions>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@@ -0,0 +1,202 @@
#!/bin/bash
# Interactive configure script for the service-data-dts server: builds the
# log/conf/data directories, copies the default configuration files, and can
# initialize the backing MySQL database from ${SQL_SOURCE_PATH}.
DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
SHELL_LOG="${DIR}/console.out"
SERVER_NAME="service-data-dts"
USER=`whoami`
SAFE_MODE=true
SUDO_USER=false
ENV_FILE_PATH="${DIR}/env.properties"
# Print command-line usage.
usage(){
printf "Configure usage:\n"
printf "\t%-10s %-10s %-2s \n" --server "server-name" "Name of service-data-dts server"
printf "\t%-10s %-10s %-2s \n" --unsafe "unsafe mode" "Will clean the directory existed"
printf "\t%-10s %-10s %-2s \n" --safe "safe mode" "Will not modify the directory existed (Default)"
printf "\t%-10s %-10s %-2s \n" "-h|--help" "usage" "List help document"
}
# LOG LEVEL MESSAGE - timestamped log line to stdout and ${SHELL_LOG}.
LOG(){
currentTime=`date "+%Y-%m-%d %H:%M:%S.%3N"`
echo -e "$currentTime [${1}] ($$) $2" | tee -a ${SHELL_LOG}
}
# Ask a yes/no question; returns 0 for yes, 1 for no.
interact_echo(){
while [ 1 ]; do
read -p "$1 (Y/N)" yn
if [ "${yn}x" == "Yx" ] || [ "${yn}x" == "yx" ]; then
return 0
elif [ "${yn}x" == "Nx" ] || [ "${yn}x" == "nx" ]; then
return 1
else
echo "Unknown choise: [$yn], please choose again."
fi
done
}
# Returns 0 when the current user has (cached) sudo rights.
is_sudo_user(){
sudo -v >/dev/null 2>&1
}
# Resolve the directory containing this script, following symlinks.
abs_path(){
SOURCE="${BASH_SOURCE[0]}"
while [ -h "${SOURCE}" ]; do
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$(readlink "${SOURCE}")"
[[ ${SOURCE} != /* ]] && SOURCE="${DIR}/${SOURCE}"
done
echo "$( cd -P "$( dirname "${SOURCE}" )" && pwd )"
}
# check_exist PATH FATAL - log when PATH exists; exit the script when FATAL is "true".
check_exist(){
if test -e "$1"; then
LOG INFO "Directory or file: [$1] has been exist"
if [ $2 == true ]; then
LOG INFO "Configure program will shutdown..."
exit 0
fi
fi
}
# copy_replace FILE - replace ${CONF_PATH}/FILE with the bundled ../conf/FILE.
# In safe mode an existing target aborts the whole script (via check_exist).
copy_replace(){
file_name=$1
if test -e "${CONF_PATH}/${file_name}";then
if [ ${SAFE_MODE} == true ]; then
check_exist "${CONF_PATH}/${file_name}" true
fi
LOG INFO "Delete file or directory: [${CONF_PATH}/${file_name}]"
rm -rf ${CONF_PATH}/${file_name}
fi
if test -e "${DIR}/../conf/${file_name}";then
LOG INFO "Copy from ${DIR}/../conf/${file_name}"
cp -R ${DIR}/../conf/${file_name} ${CONF_PATH}/
fi
}
# mkdir_p PATH - create PATH (via sudo when available), honoring safe mode.
mkdir_p(){
if [ ${SAFE_MODE} == true ]; then
check_exist $1 false
fi
if [ ! -d $1 ]; then
LOG INFO "Creating directory: ["$1"]."
#mkdir -p $1
if [ ${SUDO_USER} == true ]; then
sudo mkdir -p $1 && sudo chown -R ${USER} $1
else
mkdir -p $1
fi
fi
}
# Parse command-line options.
# NOTE: getopts never runs, so OPTIND stays 1 and ${!OPTIND} (indirect
# expansion) always reads "$1".
while [ 1 ]; do
case ${!OPTIND} in
--server)
SERVER_NAME=$2
shift 2
;;
--unsafe)
SAFE_MODE=false
shift 1
;;
--safe)
SAFE_MODE=true
shift 1
;;
--help|-h)
usage
exit 0
;;
*)
break
;;
esac
done
is_sudo_user
if [ $? == 0 ]; then
SUDO_USER=true
fi
BIN=`abs_path`
# NOTE(review): the pattern "dts-servie-" (sic) never matches the default name
# "service-data-dts", so SERVER_NAME_SIMPLE usually equals SERVER_NAME - confirm
# the intended prefix to strip.
SERVER_NAME_SIMPLE=${SERVER_NAME/dts-servie-/}
LOG_PATH=${BIN}/../logs
# When BASE_LOG_DIR/BASE_CONF_DIR/BASE_DATA_DIR are set, relocate the matching
# directory and rewrite the (possibly commented-out) entry in env.properties.
if [ "x${BASE_LOG_DIR}" != "x" ]; then
LOG_PATH=${BASE_LOG_DIR}/${SERVER_NAME_SIMPLE}
sed -ri "s![#]?(WEB_LOG_PATH=)\S*!\1${LOG_PATH}!g" ${ENV_FILE_PATH}
fi
CONF_PATH=${BIN}/../conf
if [ "x${BASE_CONF_DIR}" != "x" ]; then
CONF_PATH=${BASE_CONF_DIR}/${SERVER_NAME_SIMPLE}
sed -ri "s![#]?(WEB_CONF_PATH=)\S*!\1${CONF_PATH}!g" ${ENV_FILE_PATH}
fi
DATA_PATH=${BIN}/../data
if [ "x${BASE_DATA_DIR}" != "x" ]; then
DATA_PATH=${BASE_DATA_DIR}/${SERVER_NAME_SIMPLE}
sed -ri "s![#]?(DATA_PATH=)\S*!\1${DATA_PATH}!g" ${ENV_FILE_PATH}
fi
echo "Start to make directory"
# Start to make directory
LOG INFO "\033[1m Start to build directory\033[0m"
mkdir_p ${LOG_PATH}
mkdir_p ${CONF_PATH}
mkdir_p ${DATA_PATH}
if [ "x${BASE_CONF_DIR}" != "x" ]; then
LOG INFO "\033[1m Start to copy configuration file/directory\033[0m"
# Copy the configuration file
copy_replace bootstrap.properties
copy_replace application.yml
copy_replace logback.xml
copy_replace i18n
copy_replace mybatis-mapper
copy_replace static
fi
echo "end to make directory"
BOOTSTRAP_PROP_FILE="${CONF_PATH}/bootstrap.properties"
# Start to initalize database
echo "Start to initalize database"
# Only attempt DB init when a SQL file is supplied and the mysql client exists.
if [ "x${SQL_SOURCE_PATH}" != "x" ] && [ -f "${SQL_SOURCE_PATH}" ]; then
`mysql --version >/dev/null 2>&1`
if [ $? == 0 ]; then
LOG INFO "\033[1m Scan out mysql command, so begin to initalize the database\033[0m"
interact_echo "Do you want to initalize database with sql: [${SQL_SOURCE_PATH}]?"
if [ $? == 0 ]; then
read -p "Please input the db host(default: 127.0.0.1): " HOST
if [ "x${HOST}" == "x" ]; then
HOST="127.0.0.1"
fi
while [ 1 ]; do
read -p "Please input the db port(default: 3306): " PORT
if [ "x${PORT}" == "x" ]; then
PORT=3306
break
elif [ ${PORT} -gt 0 ] 2>/dev/null; then
break
else
echo "${PORT} is not a number, please input again"
fi
done
read -p "Please input the db username(default: root): " USERNAME
if [ "x${USERNAME}" == "x" ]; then
USERNAME="root"
fi
read -p "Please input the db password(default: ""): " PASSWORD
read -p "Please input the db name(default: studio)" DATABASE
if [ "x${DATABASE}" == "x" ]; then
DATABASE="studio"
fi
# Create/refresh the database, then persist the connection settings into
# bootstrap.properties (uncommenting the keys when needed).
mysql -h ${HOST} -P ${PORT} -u ${USERNAME} -p${PASSWORD} --default-character-set=utf8 -e \
"CREATE DATABASE IF NOT EXISTS ${DATABASE}; USE ${DATABASE}; source ${SQL_SOURCE_PATH};"
sed -ri "s![#]?(DB_HOST=)\S*!\1${HOST}!g" ${BOOTSTRAP_PROP_FILE}
sed -ri "s![#]?(DB_PORT=)\S*!\1${PORT}!g" ${BOOTSTRAP_PROP_FILE}
sed -ri "s![#]?(DB_USERNAME=)\S*!\1${USERNAME}!g" ${BOOTSTRAP_PROP_FILE}
sed -ri "s![#]?(DB_PASSWORD=)\S*!\1${PASSWORD}!g" ${BOOTSTRAP_PROP_FILE}
sed -ri "s![#]?(DB_DATABASE=)\S*!\1${DATABASE}!g" ${BOOTSTRAP_PROP_FILE}
fi
fi
fi

View File

@@ -0,0 +1,9 @@
@echo off
rem Windows launcher for the admin engine.
rem %~dp0 is the directory containing this script (with a trailing backslash).
set home=%~dp0
set conf_dir=%home%..\conf
set lib_dir=%home%..\lib\*
set log_dir=%home%..\logs
echo %conf_dir%
rem Start with the standalone profile; the conf directory comes first on the
rem classpath so configuration files shadow anything packaged in the jars.
java -Dspring.profiles.active=standalone -Dlogging.file=%log_dir%\dbApi.log -classpath %conf_dir%;%lib_dir% com.platform.admin.Engine
pause

View File

@@ -0,0 +1,277 @@
#!/bin/bash
#
# Control script for FLINKX-ADMIN: start/stop/shutdown/restart the JVM.
FRIEND_NAME=FLINKX-ADMIN
# Fully-qualified main class launched by start_m.
MAIN_CLASS=com.platform.admin.Engine
# Environment file (key=value pairs) loaded from the script directory.
if [ ! ${ENV_FILE} ]; then
ENV_FILE="env.properties"
fi
# Seconds between polls while waiting for start/stop to complete.
SLEEP_TIMEREVAL_S=2
abs_path(){
    # Resolve the absolute directory containing this script, following any
    # chain of symlinks back to the real file.
    local src="${BASH_SOURCE[0]}"
    local dir
    while [ -h "${src}" ]; do
        dir="$( cd -P "$( dirname "$src" )" && pwd )"
        src="$(readlink "${src}")"
        # A relative link target is relative to the link's own directory.
        [[ ${src} != /* ]] && src="${dir}/${src}"
    done
    echo "$( cd -P "$( dirname "${src}" )" && pwd )"
}
function LOG(){
    # LOG <LEVEL> <message>: write a timestamped line to stdout and append
    # it to ${SHELL_LOG}.
    local ts
    ts=$(date "+%Y-%m-%d %H:%M:%S.%3N")
    echo -e "$ts [${1}] ($$) $2" | tee -a ${SHELL_LOG}
}
verify_java_env(){
    # Verify that a runnable JVM exists; prefer ${JAVA_HOME}/bin/java and
    # fall back to `java` on PATH. Returns 0 when OK, 1 otherwise.
    local java_bin="java"
    if [ "x${JAVA_HOME}" != "x" ]; then
        java_bin="${JAVA_HOME}/bin/java"
    fi
    if ! ${java_bin} -version >/dev/null 2>&1; then
        cat 1>&2 <<EOF
+========================================================================+
| Error: Java Environment is not availiable, Please check your JAVA_HOME |
+------------------------------------------------------------------------+
EOF
        return 1
    fi
    return 0
}
load_env(){
# Read ${BIN}/${ENV_FILE} line by line and turn each key=value pair into a
# shell variable; dots in keys become underscores (a.b=1 -> a_b=1).
LOG INFO "load environment variables"
while read line
do
# Only process lines that actually contain an '=' sign.
if [[ ! -z $(echo "${line}" | grep "=") ]]; then
key=${line%%=*}
value=${line#*=}
key1=$(echo ${key} | tr '.' '_')
# Skip commented-out keys (key part starting with '#').
# NOTE(review): values are passed through eval, so ${BIN}-style
# references in the env file are expanded here.
if [ -z $(echo "${key1}" | grep -P '\s*#+.*') ]; then
eval "${key1}=${value}"
fi
fi
done < "${BIN}/${ENV_FILE}"
}
BIN=`abs_path`
SHELL_LOG="${BIN}/console.out"
load_env
# verify environment
# BUGFIX: the original used `if [ $? -ne 0 ]; then exit $?; fi`; inside the
# branch $? is the (successful) status of the [ ] test itself, so the script
# always exited 0 even when the JVM check failed. Exit non-zero instead.
verify_java_env || exit 1
# Defaults for anything the env file did not provide.
if [[ ! ${SERVICE_LOG_PATH} ]]; then
SERVICE_LOG_PATH=${BIN}/../logs
fi
if [[ ! ${SERVICE_CONF_PATH} ]]; then
SERVICE_CONF_PATH=${BIN}/../conf
fi
if [[ ! ${DATA_PATH} ]]; then
DATA_PATH=${BIN}/../data
fi
if [[ ! ${MAIL_USERNAME} ]]; then
MAIL_USERNAME="flinkx"
fi
if [[ ! ${MAIL_PASSWORD} ]]; then
MAIL_PASSWORD="123456"
fi
if [[ ! ${JAVA_OPTS} ]]; then
JAVA_OPTS=" -Xms2g -Xmx2g -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8"
fi
if [[ ! ${REMOTE_DEBUG_SWITCH} ]]; then
REMOTE_DEBUG_SWITCH=false
fi
if [[ ! ${REMOTE_DEBUG_PORT} ]]; then
REMOTE_DEBUG_PORT="8089"
fi
LIB_PATH=${BIN}/../lib
USER_DIR=${BIN}/../
CLASSPATH=${LIB_PATH}"/*:"${SERVICE_CONF_PATH}":."
# Optionally enable JDWP remote debugging on ${REMOTE_DEBUG_PORT}.
if [ ${REMOTE_DEBUG_SWITCH} == true ]; then
JAVA_OPTS=${JAVA_OPTS}" -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=${REMOTE_DEBUG_PORT}"
fi
JAVA_OPTS=${JAVA_OPTS}" -XX:HeapDumpPath="${SERVICE_LOG_PATH}" -Dlog.path="${SERVICE_LOG_PATH}
JAVA_OPTS=${JAVA_OPTS}" -Duser.dir="${USER_DIR}
JAVA_OPTS=${JAVA_OPTS}" -Ddata.path="${DATA_PATH}" -Dmail.username="${MAIL_USERNAME}" -Dmail.password="${MAIL_PASSWORD}
if [ "x"${PID_FILE_PATH} != "x" ]; then
JAVA_OPTS=${JAVA_OPTS}" -Dpid.file="${PID_FILE_PATH}
fi
JAVA_OPTS=${JAVA_OPTS}" -Dlogging.config="${SERVICE_CONF_PATH}"/logback.xml"
JAVA_OPTS=${JAVA_OPTS}" -classpath "${CLASSPATH}
# Build the launch command, preferring the JAVA_HOME toolchain (java + jps).
if [ "x${JAVA_HOME}" != "x" ]; then
EXE_JAVA=${JAVA_HOME}"/bin/java "${JAVA_OPTS}" "${MAIN_CLASS}
JPS=${JAVA_HOME}/bin/jps
else
EXE_JAVA="java "${JAVA_OPTS}" "${MAIN_CLASS}
JPS="jps"
fi
usage(){
    # Print the accepted sub-commands.
    printf '%s\n' " usage is [start|stop|shutdown|restart]"
}
# check if the process still in jvm
status_class(){
# status_class <friendly-name> <main-class>
# Returns 0 when the target java process is running, 1 otherwise.
# When PID_FILE_PATH is set, the pid recorded in that file is matched
# against `jps -q`; otherwise `jps -l` output is grepped for the main
# class name ($2).
local p=""
if [ "x"${PID_FILE_PATH} != "x" ]; then
if [ -f ${PID_FILE_PATH} ]; then
local pid_in_file=`cat ${PID_FILE_PATH} 2>/dev/null`
if [ "x"${pid_in_file} != "x" ]; then
p=`${JPS} -q | grep ${pid_in_file} | awk '{print $1}'`
fi
fi
else
p=`${JPS} -l | grep "$2" | awk '{print $1}'`
fi
if [ -n "$p" ]; then
# echo "$1 ($2) is still running with pid $p"
return 0
else
# echo "$1 ($2) does not appear in the java process table"
return 1
fi
}
wait_for_startup(){
# Poll status_class for up to $1 seconds until the process appears.
# Returns 0 on success, 1 on timeout. BUGFIX: the original called `exit 1`
# on timeout, aborting the whole script and making start_m's error branch
# unreachable; wait_for_stop already uses `return`, so do the same here.
local now_s=`date '+%s'`
local stop_s=$((${now_s} + $1))
while [ ${now_s} -le ${stop_s} ];do
status_class ${FRIEND_NAME} ${MAIN_CLASS}
if [ $? -eq 0 ]; then
return 0
fi
sleep ${SLEEP_TIMEREVAL_S}
now_s=`date '+%s'`
done
return 1
}
wait_for_stop(){
    # Poll for up to $1 seconds until the process disappears from the
    # java process table. Returns 0 once gone, 1 on timeout.
    local deadline=$(( $(date '+%s') + $1 ))
    while [ "$(date '+%s')" -le "${deadline}" ]; do
        if ! status_class ${FRIEND_NAME} ${MAIN_CLASS}; then
            return 0
        fi
        sleep ${SLEEP_TIMEREVAL_S}
    done
    return 1
}
start_m(){
# Start the JVM unless it is already running, then wait up to 20s for it
# to appear in the java process table.
status_class ${FRIEND_NAME} ${MAIN_CLASS}
if [ $? -eq 0 ]; then
LOG INFO "${FRIEND_NAME} has been started in process"
exit 0
fi
LOG INFO ${EXE_JAVA}
# Detach from the terminal; all console output goes to ${SHELL_LOG}.
nohup ${EXE_JAVA} >${SHELL_LOG} 2>&1 &
LOG INFO "Waiting ${FRIEND_NAME} to start complete ..."
wait_for_startup 20
if [ $? -eq 0 ]; then
LOG INFO "${FRIEND_NAME} start success"
return 0
else
# NOTE(review): wait_for_startup exits the script on timeout, so this
# branch is currently unreachable — confirm the intended behavior.
LOG ERROR "${FRIEND_NAME} start exceeded over 20s" >&2
return 1
fi
}
stop_m(){
# Gracefully stop the process (SIGTERM, taskkill on Cygwin) and wait up
# to 20s for it to exit. Returns 0 on success, 1 on timeout.
local p=""
if [ "x"${PID_FILE_PATH} != "x" ]; then
if [ -f ${PID_FILE_PATH} ]; then
local pid_in_file=`cat ${PID_FILE_PATH} 2>/dev/null`
if [ "x"${pid_in_file} != "x" ]; then
p=`${JPS} -q | grep ${pid_in_file} | awk '{print $1}'`
fi
fi
else
p=`${JPS} -l | grep "${MAIN_CLASS}" | awk '{print $1}'`
fi
if [ -z "${p}" ]; then
LOG INFO "${FRIEND_NAME} didn't start successfully, not found in the java process table"
return 0
fi
LOG INFO "Killing ${FRIEND_NAME} (pid ${p}) ..."
# BUGFIX: the pattern was misspelled "CYCGWIN*", so the Cygwin branch
# could never match uname's "CYGWIN_NT-..." output.
case "`uname`" in
CYGWIN*) taskkill /PID "${p}" ;;
*) kill -SIGTERM "${p}" ;;
esac
LOG INFO "Waiting ${FRIEND_NAME} to stop complete ..."
wait_for_stop 20
if [ $? -eq 0 ]; then
LOG INFO "${FRIEND_NAME} stop success"
return 0
else
LOG ERROR "${FRIEND_NAME} stop exceeded over 20s" >&2
return 1
fi
}
shutdown_m(){
# Force-kill the process immediately (kill -9, taskkill /F on Cygwin).
local p=""
if [ "x"${PID_FILE_PATH} != "x" ]; then
if [ -f ${PID_FILE_PATH} ]; then
local pid_in_file=`cat ${PID_FILE_PATH} 2>/dev/null`
if [ "x"${pid_in_file} != "x" ]; then
p=`${JPS} -q | grep ${pid_in_file} | awk '{print $1}'`
fi
fi
else
p=`${JPS} -l | grep "${MAIN_CLASS}" | awk '{print $1}'`
fi
if [ -z "${p}" ]; then
LOG INFO "${FRIEND_NAME} didn't start successfully, not found in the java process table"
return 0
fi
LOG INFO "Killing ${FRIEND_NAME} (pid ${p}) ..."
# BUGFIX: the pattern was misspelled "CYCGWIN*", so the Cygwin branch
# could never match uname's "CYGWIN_NT-..." output.
case "`uname`" in
CYGWIN*) taskkill /F /PID "${p}" ;;
*) kill -9 "${p}" ;;
esac
}
restart_m(){
    # Stop the process, then start it again; propagate start_m's exit code.
    if stop_m; then
        start_m
        exit $?
    fi
    LOG ERROR "${FRIEND_NAME} restart fail" >&2
    exit 1
}
# Require exactly one sub-command argument.
if [ ! $1 ]; then
usage
exit 1;
fi
# Dispatch to the matching action; unknown commands print usage.
case $1 in
start) start_m;;
stop) stop_m;;
shutdown) shutdown_m;;
restart) restart_m;;
*)
usage
exit 1
;;
esac
# Propagate the action's exit status as the script's exit status.
exit $?

View File

@@ -0,0 +1,21 @@
# environment variables
#JAVA_HOME=""
# NOTE(review): the start script reads SERVICE_LOG_PATH / SERVICE_CONF_PATH,
# while this file sets WEB_LOG_PATH / WEB_CONF_PATH — confirm which names the
# consuming script actually expects. ${BIN} is expanded by the script's
# load_env when this file is read.
WEB_LOG_PATH=${BIN}/../logs
WEB_CONF_PATH=${BIN}/../conf
DATA_PATH=${BIN}/../data
SERVER_PORT=8080
#PID_FILE_PATH=${BIN}/flinkxadmin.pid
# mail account
MAIL_USERNAME=""
MAIL_PASSWORD=""
#debug
#REMOTE_DEBUG_SWITCH=true
#REMOTE_DEBUG_PORT=7003

View File

@@ -0,0 +1,35 @@
package com.platform.admin;
import com.platform.admin.entity.Common;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.data.mongo.MongoDataAutoConfiguration;
import org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration;
import org.springframework.cloud.client.SpringCloudApplication;
import org.springframework.cloud.openfeign.EnableFeignClients;
import org.springframework.core.env.Environment;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
import java.net.InetAddress;
import java.net.UnknownHostException;
/**
 * Application entry point: bootstraps the Spring context for the DTS service.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 **/
@EnableSwagger2
@SpringCloudApplication
@EnableFeignClients(basePackages = {"cn.datax.service.system.api.feign"})
@EnableAutoConfiguration(exclude={MongoAutoConfiguration.class})
public class DataDtsServiceApplication {

    public static void main(String[] args) {
        // Forward the command-line args so --spring.* / --server.* overrides
        // work; the original dropped them.
        SpringApplication.run(DataDtsServiceApplication.class, args);
    }
}

View File

@@ -0,0 +1,31 @@
package com.platform.admin.base;
import cn.datax.service.system.api.dto.JwtUserDto;
import cn.datax.service.system.api.feign.UserServiceFeign;
import com.baomidou.mybatisplus.extension.api.ApiController;
import com.platform.admin.util.JwtTokenUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.servlet.http.HttpServletRequest;
import java.util.Enumeration;
import static com.platform.core.util.Constants.STRING_BLANK;
/**
 * Base controller shared by the admin REST controllers; exposes the current
 * user's id to subclasses.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 **/
@Component
public class BaseController extends ApiController {

    @Autowired
    UserServiceFeign userServiceFeign;

    // NOTE(review): stub — always reports user id 1 and ignores the request;
    // presumably meant to resolve the id from the JWT token via
    // JwtTokenUtils/userServiceFeign. Confirm before relying on it.
    public Long getCurrentUserId(HttpServletRequest request) {
        return 1L;
    }
}

View File

@@ -0,0 +1,252 @@
package com.platform.admin.base;
import cn.hutool.core.util.BooleanUtil;
import cn.hutool.core.util.NumberUtil;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.platform.admin.util.PageUtils;
import com.platform.admin.util.ServletUtils;
import lombok.extern.slf4j.Slf4j;
import javax.servlet.http.HttpServletRequest;
import java.net.URLDecoder;
import java.util.Enumeration;
import java.util.LinkedHashMap;
import java.util.Map;
/**
*
* @author AllDataDC
* @date 2023/3/26 11:14
* 基础参数辅助类
**/
@Slf4j
public class BaseForm {
/**
* 查询参数对象
*/
protected Map<String, Object> values = new LinkedHashMap<>();
/**
* 当前页码
*/
private Long current = 1L;
/**
* 页大小
*/
private Long size = 10L;
/**
* 构造方法
*/
public BaseForm() {
try {
HttpServletRequest request = ServletUtils.getRequest();
Enumeration<String> params = request.getParameterNames();
while (params.hasMoreElements()) {
String name = params.nextElement();
String value = StrUtil.trim(request.getParameter(name));
this.set(name, URLDecoder.decode(value, "UTF-8"));
}
this.parsePagingQueryParams();
} catch (Exception e) {
e.printStackTrace();
log.error("BaseControlForm initialize parameters setting error" + e);
}
}
/**
* 获取页码
*
* @return
*/
public Long getPageNo() {
String pageNum = StrUtil.toString(this.get("current"));
if (!StrUtil.isEmpty(pageNum) && NumberUtil.isNumber(pageNum)) {
this.current = Long.parseLong(pageNum);
}
return this.current;
}
/**
* 获取页大小
*
* @return
*/
public Long getPageSize() {
String pageSize = StrUtil.toString(this.get("size"));
if (StrUtil.isNotEmpty(pageSize) && NumberUtil.isNumber(pageSize) && !"null".equalsIgnoreCase(pageSize)) {
this.size = Long.parseLong(pageSize);
}
return this.size;
}
/**
* 获得参数信息对象
*
* @return
*/
public Map<String, Object> getParameters() {
return values;
}
/**
* 根据key获取values中的值
*
* @param name
* @return
*/
public Object get(String name) {
if (values == null) {
values = new LinkedHashMap<>();
return null;
}
return this.values.get(name);
}
/**
* 根据key获取values中String类型值
*
* @param key
* @return String
*/
public String getString(String key) {
return StrUtil.toString(get(key));
}
/**
* 获取排序字段
*
* @return
*/
public String getSort() {
return StrUtil.toString(this.values.get("sort"));
}
/**
* 获取排序
*
* @return
*/
public String getOrder() {
return StrUtil.toString(this.values.get("order"));
}
/**
* 获取排序
*
* @return
*/
public String getOrderby() {
return StrUtil.toString(this.values.get("orderby"));
}
/**
* 解析出mybatis plus分页查询参数
*/
public Page getPlusPagingQueryEntity() {
Page page = new Page();
//如果无current默认返回1000条数据
page.setCurrent(this.getPageNo());
page.setSize(this.getPageSize());
if (ObjectUtil.isNotNull(this.get("ifCount"))) {
page.setSearchCount(BooleanUtil.toBoolean(this.getString("ifCount")));
} else {
//默认给true
page.setSearchCount(true);
}
return page;
}
/**
* 解析分页排序参数pageHelper
*/
public void parsePagingQueryParams() {
// 排序字段解析
String orderBy = StrUtil.toString(this.get("orderby")).trim();
String sortName = StrUtil.toString(this.get("sort")).trim();
String sortOrder = StrUtil.toString(this.get("order")).trim().toLowerCase();
if (StrUtil.isEmpty(orderBy) && !StrUtil.isEmpty(sortName)) {
if (!sortOrder.equals("asc") && !sortOrder.equals("desc")) {
sortOrder = "asc";
}
this.set("orderby", sortName + " " + sortOrder);
}
}
/**
* 设置参数
*
* @param name 参数名称
* @param value 参数值
*/
public void set(String name, Object value) {
if (ObjectUtil.isNotNull(value)) {
this.values.put(name, value);
}
}
/**
* 移除参数
*
* @param name
*/
public void remove(String name) {
this.values.remove(name);
}
/**
* 清除所有参数
*/
public void clear() {
if (values != null) {
values.clear();
}
}
/**
* 自定义查询组装
*
* @param map
* @return
*/
public QueryWrapper<?> pageQueryWrapperCustom(Map<String, Object> map, QueryWrapper<?> queryWrapper) {
// mybatis plus 分页相关的参数
Map<String, Object> pageParams = PageUtils.filterPageParams(map);
//过滤空值,分页查询相关的参数
Map<String, Object> colQueryMap = PageUtils.filterColumnQueryParams(map);
//排序 操作
pageParams.forEach((k, v) -> {
switch (k) {
case "ascs":
queryWrapper.orderByAsc(StrUtil.toUnderlineCase(StrUtil.toString(v)));
break;
case "descs":
queryWrapper.orderByDesc(StrUtil.toUnderlineCase(StrUtil.toString(v)));
break;
}
});
//遍历进行字段查询条件组装
colQueryMap.forEach((k, v) -> {
switch (k) {
case "pluginName":
case "datasourceName":
queryWrapper.like(StrUtil.toUnderlineCase(k), v);
break;
default:
queryWrapper.eq(StrUtil.toUnderlineCase(k), v);
}
});
return queryWrapper;
}
}

View File

@@ -0,0 +1,45 @@
package com.platform.admin.config;
import com.baomidou.mybatisplus.core.injector.DefaultSqlInjector;
import com.baomidou.mybatisplus.core.injector.ISqlInjector;
import com.baomidou.mybatisplus.extension.plugins.PaginationInterceptor;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.transaction.annotation.EnableTransactionManagement;
/**
 * MyBatis-Plus configuration: mapper scanning, pagination interceptor and
 * SQL injector.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 **/
@EnableTransactionManagement
@Configuration
@MapperScan("com.platform.admin.mapper")
public class MybatisPlusConfig {

    /**
     * Pagination plugin. overflow=true rolls a request past the last page
     * back instead of returning an empty result.
     */
    @Bean
    public PaginationInterceptor paginationInterceptor() {
        return new PaginationInterceptor().setOverflow(true);
    }

    /**
     * Logic-delete SQL injector (enable logic delete in yml). Since 3.0.7.1
     * LogicSqlInjector merely extends DefaultSqlInjector and later versions
     * removed it, so DefaultSqlInjector is used directly.
     */
    @Bean
    public ISqlInjector sqlInjector() {
        return new DefaultSqlInjector();
    }
}

View File

@@ -0,0 +1,272 @@
package com.platform.admin.config;
import com.google.common.collect.Multimap;
import io.swagger.models.*;
import io.swagger.models.parameters.Parameter;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication;
import org.springframework.context.MessageSource;
import org.springframework.context.annotation.Primary;
import org.springframework.context.i18n.LocaleContextHolder;
import org.springframework.stereotype.Component;
import springfox.documentation.service.ApiInfo;
import springfox.documentation.service.ApiListing;
import springfox.documentation.service.Documentation;
import springfox.documentation.service.ResourceListing;
import springfox.documentation.swagger2.mappers.*;
import java.util.*;
import static com.google.common.collect.Maps.newTreeMap;
/**
 * Replaces springfox's default Swagger2 mapper so that operation notes, tags
 * and parameter descriptions are resolved through the Spring
 * {@link MessageSource}, enabling i18n of the generated API documentation.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 **/
@Component(value = "ServiceModelToSwagger2Mapper")
@Primary
@ConditionalOnWebApplication
public class ServiceModelToSwagger2MapperImpl extends ServiceModelToSwagger2Mapper {

    @Autowired
    private ModelMapper modelMapper;
    @Autowired
    private ParameterMapper parameterMapper;
    @Autowired
    private SecurityMapper securityMapper;
    @Autowired
    private LicenseMapper licenseMapper;
    @Autowired
    private VendorExtensionsMapper vendorExtensionsMapper;
    // Source of the localized strings used below.
    @Autowired
    private MessageSource messageSource;

    /**
     * Maps the whole springfox {@link Documentation} model onto a Swagger
     * root object; null input yields null.
     */
    @Override
    public Swagger mapDocumentation(Documentation from) {
        if (from == null) {
            return null;
        }
        Swagger swagger = new Swagger();
        swagger.setVendorExtensions(vendorExtensionsMapper.mapExtensions(from.getVendorExtensions()));
        swagger.setSchemes(mapSchemes(from.getSchemes()));
        swagger.setPaths(mapApiListings(from.getApiListings()));
        swagger.setHost(from.getHost());
        swagger.setDefinitions(modelsFromApiListings( from.getApiListings() ) );
        swagger.setSecurityDefinitions(securityMapper.toSecuritySchemeDefinitions(from.getResourceListing()));
        ApiInfo info = fromResourceListingInfo(from);
        if (info != null) {
            swagger.setInfo(mapApiInfo(info));
        }
        swagger.setBasePath(from.getBasePath());
        swagger.setTags(tagSetToTagList(from.getTags()));
        // Copy consumes/produces; explicitly null when absent.
        List<String> list2 = from.getConsumes();
        if (list2 != null) {
            swagger.setConsumes(new ArrayList<String>(list2));
        } else {
            swagger.setConsumes(null);
        }
        List<String> list3 = from.getProduces();
        if (list3 != null) {
            swagger.setProduces(new ArrayList<String>(list3));
        } else {
            swagger.setProduces(null);
        }
        return swagger;
    }

    /** Maps springfox ApiInfo onto a Swagger Info block. */
    @Override
    protected Info mapApiInfo(ApiInfo from) {
        if (from == null) {
            return null;
        }
        Info info = new Info();
        info.setLicense(licenseMapper.apiInfoToLicense(from));
        info.setVendorExtensions(vendorExtensionsMapper.mapExtensions(from.getVendorExtensions()));
        info.setTermsOfService(from.getTermsOfServiceUrl());
        info.setContact(map(from.getContact()));
        info.setDescription(from.getDescription());
        info.setVersion(from.getVersion());
        info.setTitle(from.getTitle());
        return info;
    }

    /** Maps the contact details (name/url/email). */
    @Override
    protected Contact map(springfox.documentation.service.Contact from) {
        if (from == null) {
            return null;
        }
        Contact contact = new Contact();
        contact.setName(from.getName());
        contact.setUrl(from.getUrl());
        contact.setEmail(from.getEmail());
        return contact;
    }

    /**
     * Maps a single operation; the description and each tag name are
     * localized via the MessageSource (falling back to the raw string).
     */
    @Override
    protected Operation mapOperation(springfox.documentation.service.Operation from) {
        if (from == null) {
            return null;
        }
        Locale locale = LocaleContextHolder.getLocale();
        Operation operation = new Operation();
        operation.setSecurity(mapAuthorizations(from.getSecurityReferences()));
        operation.setVendorExtensions(vendorExtensionsMapper.mapExtensions(from.getVendorExtensions()));
        operation.setDescription(messageSource.getMessage(from.getNotes(), null, from.getNotes(), locale));
        operation.setOperationId(from.getUniqueId());
        operation.setResponses(mapResponseMessages(from.getResponseMessages()));
        operation.setSchemes(stringSetToSchemeList(from.getProtocol()));
        Set<String> tagsSet = new HashSet<>(1);
        if(from.getTags() != null && from.getTags().size() > 0){
            // NOTE(review): the list capacity is taken from the (empty, unused)
            // tagsSet rather than from.getTags() — harmless but likely a slip.
            List<String> list = new ArrayList<String>(tagsSet.size());
            Iterator<String> it = from.getTags().iterator();
            while(it.hasNext()){
                String tag = it.next();
                // Blank tags map to a single space placeholder.
                list.add(
                        StringUtils.isNotBlank(tag) ? messageSource.getMessage(tag, null, tag, locale) : " ");
            }
            operation.setTags(list);
        }else {
            operation.setTags(null);
        }
        operation.setSummary(from.getSummary());
        Set<String> set1 = from.getConsumes();
        if (set1 != null) {
            operation.setConsumes(new ArrayList<String>(set1));
        } else {
            operation.setConsumes(null);
        }
        Set<String> set2 = from.getProduces();
        if (set2 != null) {
            operation.setProduces(new ArrayList<String>(set2));
        } else {
            operation.setProduces(null);
        }
        operation.setParameters(parameterListToParameterList(from.getParameters()));
        if (from.getDeprecated() != null) {
            operation.setDeprecated(Boolean.parseBoolean(from.getDeprecated()));
        }
        return operation;
    }

    /** Maps a tag, localizing its name. */
    @Override
    protected Tag mapTag(springfox.documentation.service.Tag from) {
        if (from == null) {
            return null;
        }
        Locale locale = LocaleContextHolder.getLocale();
        Tag tag = new Tag();
        tag.setVendorExtensions(vendorExtensionsMapper.mapExtensions(from.getVendorExtensions()));
        tag.setName(messageSource.getMessage(from.getName(), null, from.getName(), locale));
        tag.setDescription(from.getDescription());
        return tag;
    }

    /** Null-safe extraction of the ApiInfo from the resource listing. */
    private ApiInfo fromResourceListingInfo(Documentation documentation) {
        if (documentation == null) {
            return null;
        }
        ResourceListing resourceListing = documentation.getResourceListing();
        if (resourceListing == null) {
            return null;
        }
        ApiInfo info = resourceListing.getInfo();
        if (info == null) {
            return null;
        }
        return info;
    }

    /** Maps each springfox tag to a Swagger tag; null set yields null. */
    protected List<Tag> tagSetToTagList(Set<springfox.documentation.service.Tag> set) {
        if (set == null) {
            return null;
        }
        List<Tag> list = new ArrayList<Tag>(set.size());
        for (springfox.documentation.service.Tag tag : set) {
            list.add(mapTag(tag));
        }
        return list;
    }

    /** Converts scheme names (e.g. "HTTP") to Scheme enum values. */
    protected List<Scheme> stringSetToSchemeList(Set<String> set) {
        if (set == null) {
            return null;
        }
        List<Scheme> list = new ArrayList<Scheme>(set.size());
        for (String string : set) {
            list.add(Enum.valueOf(Scheme.class, string));
        }
        return list;
    }

    /**
     * Maps each operation parameter, rebuilding it with a localized
     * description before delegating to the standard ParameterMapper.
     */
    protected List<Parameter> parameterListToParameterList(List<springfox.documentation.service.Parameter> list) {
        if (list == null) {
            return null;
        }
        List<Parameter> list1 = new ArrayList<Parameter>(list.size());
        Locale locale = LocaleContextHolder.getLocale();
        for (springfox.documentation.service.Parameter param : list) {
            String description = messageSource.getMessage(param.getDescription(), null, param.getDescription(), locale);
            // Rebuild the parameter with the localized description; all other
            // attributes are copied through unchanged.
            springfox.documentation.service.Parameter parameter = new springfox.documentation.service.Parameter(param.getName(),description,param.getDefaultValue(),param.isRequired(),param.isAllowMultiple(),param.isAllowEmptyValue(),param.getModelRef(),param.getType(),param.getAllowableValues(),param.getParamType(),param.getParamAccess(),param.isHidden(),param.getPattern(),param.getCollectionFormat(),param.getOrder(),param.getScalarExample(),param.getExamples() ,param.getVendorExtentions());
            list1.add(parameterMapper.mapParameter(parameter));
        }
        return list1;
    }

    /** Collects all models from every API listing into one definitions map. */
    Map<String, Model> modelsFromApiListings(Multimap<String, ApiListing> apiListings) {
        Map<String, springfox.documentation.schema.Model> definitions = newTreeMap();
        for (ApiListing each : apiListings.values()) {
            definitions.putAll(each.getModels());
        }
        return modelMapper.mapModels(definitions);
    }
}

View File

@@ -0,0 +1,41 @@
package com.platform.admin.config;
import com.github.xiaoymin.swaggerbootstrapui.annotations.EnableSwaggerBootstrapUI;
import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
import springfox.documentation.builders.ApiInfoBuilder;
import springfox.documentation.builders.PathSelectors;
import springfox.documentation.builders.RequestHandlerSelectors;
import springfox.documentation.service.ApiInfo;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
/**
 * Swagger2 API documentation configuration.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 **/
@Configuration
@EnableSwagger2
@EnableSwaggerBootstrapUI
@ConditionalOnWebApplication
public class SwaggerConfig implements WebMvcConfigurer {

    @Bean
    public Docket createRestApi() {
        // BUGFIX: the original scanned "com.guoliang.flinkx.admin.controller",
        // which does not exist here — the controllers live under
        // com.platform.admin.controller — so no endpoint was documented.
        return new Docket(DocumentationType.SWAGGER_2).apiInfo(apiInfo()).select()
                .apis(RequestHandlerSelectors.basePackage("com.platform.admin.controller")).paths(PathSelectors.any())
                .build();
    }

    /** Static metadata shown on the generated docs page. */
    private ApiInfo apiInfo() {
        return new ApiInfoBuilder().title("FlinkX Web Api Docs").description("FlinkX Web Api Docs")
                .build();
    }
}

View File

@@ -0,0 +1,24 @@
package com.platform.admin.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
@Configuration
@EnableWebSecurity(debug = false)
public class WebSecurityConfiguration extends WebSecurityConfigurerAdapter {

    /** BCrypt encoder bean for password hashing/verification. */
    @Bean
    public BCryptPasswordEncoder bCryptPasswordEncoder() {
        return new BCryptPasswordEncoder();
    }

    /**
     * Disables CSRF and permits every request (including logout).
     * NOTE(review): this leaves all endpoints unauthenticated — presumably
     * authentication is enforced upstream (e.g. a gateway); confirm.
     */
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        http.csrf().disable().authorizeRequests().anyRequest().permitAll().and().logout().permitAll();
    }
}

View File

@@ -0,0 +1,51 @@
package com.platform.admin.controller;
import com.baomidou.mybatisplus.extension.api.R;
import com.platform.admin.base.BaseController;
import com.platform.admin.dto.FlinkXJsonBuildDto;
import com.platform.admin.service.FlinkxJsonService;
import com.platform.admin.core.util.I18nUtil;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.CollectionUtils;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * Controller that assembles the flinkx job JSON from a build request.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 **/
@RestController
@RequestMapping("/flinkxJson")
@Api(tags = "组装flinkx json的控制器")
public class FlinkxJsonController extends BaseController {

    @Autowired
    private FlinkxJsonService flinkxJsonService;

    /**
     * Validates the build request (both datasources and both column lists
     * must be present) and returns the generated job JSON.
     */
    @PostMapping("/buildJson")
    @ApiOperation("JSON构建")
    public R<String> buildJobJson(@RequestBody FlinkXJsonBuildDto dto) {
        // Determine the first missing field, in the same order as before.
        String missingField = null;
        if (dto.getReaderDatasourceId() == null) {
            missingField = "jobinfo_field_readerDataSource";
        } else if (dto.getWriterDatasourceId() == null) {
            missingField = "jobinfo_field_writerDataSource";
        } else if (CollectionUtils.isEmpty(dto.getReaderColumns())) {
            missingField = "jobinfo_field_readerColumns";
        } else if (CollectionUtils.isEmpty(dto.getWriterColumns())) {
            missingField = "jobinfo_field_writerColumns";
        }
        if (missingField != null) {
            // "please choose" prefix + localized field name.
            return failed(I18nUtil.getString("system_please_choose") + I18nUtil.getString(missingField));
        }
        return success(flinkxJsonService.buildJobJson(dto));
    }
}

View File

@@ -0,0 +1,52 @@
package com.platform.admin.controller;
import com.platform.admin.service.JobService;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.ReturnT;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.propertyeditors.CustomDateEditor;
import org.springframework.web.bind.WebDataBinder;
import org.springframework.web.bind.annotation.*;
import javax.annotation.Resource;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map;
/**
 * Dashboard endpoints for the admin home page.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 **/
@RestController
@Api(tags = "首页接口")
@RequestMapping("/api")
public class IndexController {

    @Resource
    private JobService jobService;

    /** Dashboard summary numbers. */
    @GetMapping("/index")
    @ApiOperation("监控图")
    public ReturnT<Map<String, Object>> index() {
        return new ReturnT<>(jobService.dashboardInfo());
    }

    /** Chart data for the dashboard. */
    @PostMapping("/chartInfo")
    @ApiOperation("图表信息")
    public ReturnT<Map<String, Object>> chartInfo() {
        return jobService.chartInfo();
    }

    /**
     * Binds "yyyy-MM-dd HH:mm:ss" request strings to Date parameters.
     * Lenient parsing is off so malformed dates are rejected; a new
     * SimpleDateFormat per request avoids its thread-safety issues.
     */
    @InitBinder
    public void initBinder(WebDataBinder binder) {
        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        dateFormat.setLenient(false);
        binder.registerCustomEditor(Date.class, new CustomDateEditor(dateFormat, true));
    }
}

View File

@@ -0,0 +1,147 @@
package com.platform.admin.controller;
import com.platform.admin.core.conf.JobAdminConfig;
import com.platform.admin.core.util.JacksonUtil;
import com.platform.core.biz.AdminBiz;
import com.platform.core.biz.model.HandleCallbackParam;
import com.platform.core.biz.model.HandleProcessCallbackParam;
import com.platform.core.biz.model.RegistryParam;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.util.JobRemotingUtil;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.util.List;
/**
 * Executor-facing API endpoints: callbacks and executor (de)registration.
 * Every endpoint first checks the configured access token.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 **/
@RestController
@RequestMapping("/api")
public class JobApiController {

    @Resource
    private AdminBiz adminBiz;

    /**
     * @return true when an access token is configured and the request's
     *         token header does not match it (i.e. the request must be
     *         rejected). Extracted from the four endpoints below, which
     *         previously duplicated this check verbatim.
     */
    private boolean isAccessTokenWrong(HttpServletRequest request) {
        String accessToken = JobAdminConfig.getAdminConfig().getAccessToken();
        return accessToken != null
                && accessToken.trim().length() > 0
                && !accessToken.equals(request.getHeader(JobRemotingUtil.XXL_RPC_ACCESS_TOKEN));
    }

    /**
     * Executor result callback.
     *
     * @param data JSON list of HandleCallbackParam
     * @return SUCCESS, or FAIL with a reason
     */
    @RequestMapping("/callback")
    public ReturnT<String> callback(HttpServletRequest request, @RequestBody(required = false) String data) {
        // valid
        if (isAccessTokenWrong(request)) {
            return new ReturnT<>(ReturnT.FAIL_CODE, "The access token is wrong.");
        }
        // param; parsing is best-effort — any malformed payload falls through
        // to the "request data invalid" response below.
        List<HandleCallbackParam> callbackParamList = null;
        try {
            callbackParamList = JacksonUtil.readValue(data, List.class, HandleCallbackParam.class);
        } catch (Exception ignored) { }
        if (callbackParamList==null || callbackParamList.size()==0) {
            return new ReturnT<>(ReturnT.FAIL_CODE, "The request data invalid.");
        }
        // invoke
        return adminBiz.callback(callbackParamList);
    }

    /**
     * Executor process callback.
     *
     * @param data JSON list of HandleProcessCallbackParam
     * @return SUCCESS, or FAIL with a reason
     */
    @RequestMapping("/processCallback")
    public ReturnT<String> processCallback(HttpServletRequest request, @RequestBody(required = false) String data) {
        // valid
        if (isAccessTokenWrong(request)) {
            return new ReturnT<>(ReturnT.FAIL_CODE, "The access token is wrong.");
        }
        // param (best-effort parse, see callback()).
        List<HandleProcessCallbackParam> callbackParamList = null;
        try {
            callbackParamList = JacksonUtil.readValue(data, List.class, HandleProcessCallbackParam.class);
        } catch (Exception ignored) { }
        if (callbackParamList==null || callbackParamList.size()==0) {
            return new ReturnT<>(ReturnT.FAIL_CODE, "The request data invalid.");
        }
        // invoke
        return adminBiz.processCallback(callbackParamList);
    }

    /**
     * Executor registry (heartbeat).
     *
     * @param data JSON RegistryParam
     * @return SUCCESS, or FAIL with a reason
     */
    @RequestMapping("/registry")
    public ReturnT<String> registry(HttpServletRequest request, @RequestBody(required = false) String data) {
        // valid
        if (isAccessTokenWrong(request)) {
            return new ReturnT<String>(ReturnT.FAIL_CODE, "The access token is wrong.");
        }
        // param (best-effort parse, see callback()).
        RegistryParam registryParam = null;
        try {
            registryParam = JacksonUtil.readValue(data, RegistryParam.class);
        } catch (Exception ignored) {}
        if (registryParam == null) {
            return new ReturnT<String>(ReturnT.FAIL_CODE, "The request data invalid.");
        }
        // invoke
        return adminBiz.registry(registryParam);
    }

    /**
     * Executor registry removal.
     *
     * @param data JSON RegistryParam
     * @return SUCCESS, or FAIL with a reason
     */
    @RequestMapping("/registryRemove")
    public ReturnT<String> registryRemove(HttpServletRequest request, @RequestBody(required = false) String data) {
        // valid
        if (isAccessTokenWrong(request)) {
            return new ReturnT<>(ReturnT.FAIL_CODE, "The access token is wrong.");
        }
        // param (best-effort parse, see callback()).
        RegistryParam registryParam = null;
        try {
            registryParam = JacksonUtil.readValue(data, RegistryParam.class);
        } catch (Exception ignored) {}
        if (registryParam == null) {
            return new ReturnT<>(ReturnT.FAIL_CODE, "The request data invalid.");
        }
        // invoke
        return adminBiz.registryRemove(registryParam);
    }
}

View File

@@ -0,0 +1,78 @@
package com.platform.admin.controller;
import com.platform.admin.mapper.JobInfoMapper;
import com.platform.admin.mapper.JobLogGlueMapper;
import com.platform.admin.core.util.I18nUtil;
import com.platform.admin.entity.JobInfo;
import com.platform.admin.entity.JobLogGlue;
import com.platform.core.biz.model.ReturnT;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import javax.annotation.Resource;
import java.util.Date;
import static com.platform.core.biz.model.ReturnT.FAIL_CODE;
/**
*
* @author AllDataDC
* @date 2023/3/26 11:14
* 任务状态接口
**/
@RestController
@RequestMapping("/jobcode")
@Api(tags = "任务状态接口")
public class JobCodeController {

    @Resource
    private JobInfoMapper jobInfoMapper;
    @Resource
    private JobLogGlueMapper jobLogGlueMapper;

    /**
     * Save a job's glue source and remark: updates the job row, appends a
     * glue-history entry, and prunes history beyond the latest 30 versions.
     *
     * @param id         job id
     * @param glueSource new glue source code
     * @param glueRemark change remark, required, 4..100 characters
     * @return SUCCESS, or FAIL with an i18n message when validation fails
     */
    @RequestMapping(value = "/save", method = RequestMethod.POST)
    @ApiOperation("保存任务状态")
    public ReturnT<String> save(Model model, int id, String glueSource, String glueRemark) {
        // valid
        if (glueRemark == null) {
            return new ReturnT<>(FAIL_CODE, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobinfo_glue_remark")));
        }
        if (glueRemark.length() < 4 || glueRemark.length() > 100) {
            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("jobinfo_glue_remark_limit"));
        }
        JobInfo existsJobInfo = jobInfoMapper.loadById(id);
        if (existsJobInfo == null) {
            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("jobinfo_glue_jobid_invalid"));
        }
        // one shared timestamp so the job row and its history entry agree exactly
        // (previously each field got its own new Date())
        Date now = new Date();
        // update new code
        existsJobInfo.setGlueSource(glueSource);
        existsJobInfo.setGlueRemark(glueRemark);
        existsJobInfo.setGlueUpdatetime(now);
        existsJobInfo.setUpdateTime(now);
        jobInfoMapper.update(existsJobInfo);
        // log old code
        JobLogGlue jobLogGlue = new JobLogGlue();
        jobLogGlue.setJobId(existsJobInfo.getId());
        jobLogGlue.setGlueType(existsJobInfo.getGlueType());
        jobLogGlue.setGlueSource(glueSource);
        jobLogGlue.setGlueRemark(glueRemark);
        jobLogGlue.setAddTime(now);
        jobLogGlue.setUpdateTime(now);
        jobLogGlueMapper.save(jobLogGlue);
        // remove code backup more than 30
        jobLogGlueMapper.removeOld(existsJobInfo.getId(), 30);
        return ReturnT.SUCCESS;
    }
}

View File

@@ -0,0 +1,136 @@
package com.platform.admin.controller;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.api.R;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.platform.admin.base.BaseController;
import com.platform.admin.base.BaseForm;
import com.platform.admin.service.JobDatasourceService;
import com.platform.admin.core.util.LocalCacheUtil;
import com.platform.admin.entity.JobDatasource;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.io.IOException;
import java.io.Serializable;
import java.util.List;
/**
*
* @author AllDataDC
* @date 2023/3/26 11:14
* jdbc数据源配置控制器层
**/
@RestController
@RequestMapping("/jobJdbcDatasource")
@Api(tags = "jdbc数据源配置接口")
public class JobDatasourceController extends BaseController {
    /**
     * 服务对象
     */
    @Autowired
    private JobDatasourceService jobJdbcDatasourceService;

    /**
     * Paged listing of all datasources; paging/sorting parameters are read
     * from the current request via {@link BaseForm}.
     *
     * @return one page of datasources
     */
    @GetMapping
    @ApiOperation("分页查询所有数据")
    @ApiImplicitParams(
            {@ApiImplicitParam(paramType = "query", dataType = "String", name = "current", value = "当前页", defaultValue = "1", required = true),
                    @ApiImplicitParam(paramType = "query", dataType = "String", name = "size", value = "一页大小", defaultValue = "10", required = true),
                    @ApiImplicitParam(paramType = "query", dataType = "Boolean", name = "ifCount", value = "是否查询总数", defaultValue = "true"),
                    @ApiImplicitParam(paramType = "query", dataType = "String", name = "ascs", value = "升序字段,多个用逗号分隔"),
                    @ApiImplicitParam(paramType = "query", dataType = "String", name = "descs", value = "降序字段,多个用逗号分隔")
            })
    public R<IPage<JobDatasource>> selectAll() {
        BaseForm form = new BaseForm();
        QueryWrapper<JobDatasource> query = (QueryWrapper<JobDatasource>) form.pageQueryWrapperCustom(form.getParameters(), new QueryWrapper<JobDatasource>());
        // typed page instead of the raw type used previously
        Page<JobDatasource> jdbcList = jobJdbcDatasourceService.page(form.getPlusPagingQueryEntity(), query);
        return success(jdbcList);
    }

    /**
     * List every datasource (no paging).
     */
    @ApiOperation("获取所有数据源")
    @GetMapping("/all")
    public R<List<JobDatasource>> selectAllDatasource() {
        return success(this.jobJdbcDatasourceService.selectAllDatasource());
    }

    /**
     * Fetch a single datasource by primary key.
     *
     * @param id 主键
     * @return the matching row, or null payload when absent
     */
    @ApiOperation("通过主键查询单条数据")
    @GetMapping("{id}")
    public R<JobDatasource> selectOne(@PathVariable Serializable id) {
        return success(this.jobJdbcDatasourceService.getById(id));
    }

    /**
     * Create a new datasource.
     *
     * @param entity 实体对象
     * @return whether the insert succeeded
     */
    @ApiOperation("新增数据")
    @PostMapping
    public R<Boolean> insert(@RequestBody JobDatasource entity) {
        return success(this.jobJdbcDatasourceService.save(entity));
    }

    /**
     * Update a datasource. Credentials that are unchanged relative to the
     * stored row are blanked out so they are not re-written (and re-encrypted).
     *
     * @param entity 实体对象
     * @return whether the update succeeded
     */
    @PutMapping
    @ApiOperation("修改数据")
    public R<Boolean> update(@RequestBody JobDatasource entity) {
        LocalCacheUtil.remove(entity.getDatasourceName());
        JobDatasource stored = jobJdbcDatasourceService.getById(entity.getId());
        if (stored == null) {
            // unknown id: nothing to update (previously this dereferenced null)
            return success(false);
        }
        // fix: guard on the INCOMING value — the old check guarded the stored
        // username but called equals() on the request field, so a request that
        // omitted jdbcUsername threw a NullPointerException
        if (entity.getJdbcUsername() != null && entity.getJdbcUsername().equals(stored.getJdbcUsername())) {
            entity.setJdbcUsername(null);
        }
        if (entity.getJdbcPassword() != null && entity.getJdbcPassword().equals(stored.getJdbcPassword())) {
            entity.setJdbcPassword(null);
        }
        return success(this.jobJdbcDatasourceService.updateById(entity));
    }

    /**
     * Delete datasources by id list.
     *
     * @param idList 主键结合
     * @return whether the delete succeeded
     */
    @DeleteMapping
    @ApiOperation("删除数据")
    public R<Boolean> delete(@RequestParam("idList") List<Long> idList) {
        return success(this.jobJdbcDatasourceService.removeByIds(idList));
    }

    /**
     * Test connectivity of the given datasource definition.
     */
    @PostMapping("/test")
    @ApiOperation("测试数据")
    public R<Boolean> dataSourceTest (@RequestBody JobDatasource jobJdbcDatasource) throws IOException {
        return success(jobJdbcDatasourceService.dataSourceTest(jobJdbcDatasource));
    }
}

View File

@@ -0,0 +1,175 @@
package com.platform.admin.controller;
import com.platform.admin.mapper.JobGroupMapper;
import com.platform.admin.mapper.JobInfoMapper;
import com.platform.admin.mapper.JobRegistryMapper;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.enums.RegistryConfig;
import com.platform.admin.core.util.I18nUtil;
import com.platform.admin.entity.JobGroup;
import com.platform.admin.entity.JobRegistry;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.web.bind.annotation.*;
import javax.annotation.Resource;
import java.util.*;
/**
*
* @author AllDataDC
* @date 2023/3/26 11:14
* 执行器管理接口
**/
@RestController
@RequestMapping("/jobGroup")
@Api(tags = "执行器管理接口")
public class JobGroupController {

    @Resource
    public JobInfoMapper jobInfoMapper;
    @Resource
    public JobGroupMapper jobGroupMapper;
    @Resource
    private JobRegistryMapper jobRegistryMapper;

    /** List every executor group. */
    @GetMapping("/list")
    @ApiOperation("执行器列表")
    public ReturnT<List<JobGroup>> getExecutorList() {
        return new ReturnT<>(jobGroupMapper.findAll());
    }

    /**
     * Create an executor group after validating AppName (4..64 chars), title,
     * and — for manually-addressed groups — the comma-separated address list.
     */
    @PostMapping("/save")
    @ApiOperation("新建执行器")
    public ReturnT<String> save(@RequestBody JobGroup jobGroup) {
        // valid
        if (jobGroup.getAppName() == null || jobGroup.getAppName().trim().length() == 0) {
            return new ReturnT<String>(500, (I18nUtil.getString("system_please_input") + "AppName"));
        }
        if (jobGroup.getAppName().length() < 4 || jobGroup.getAppName().length() > 64) {
            return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_appName_length"));
        }
        if (jobGroup.getTitle() == null || jobGroup.getTitle().trim().length() == 0) {
            return new ReturnT<String>(500, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobgroup_field_title")));
        }
        if (jobGroup.getAddressType() != 0) {
            // manual address entry: list must be present and contain no blank items
            if (jobGroup.getAddressList() == null || jobGroup.getAddressList().trim().length() == 0) {
                return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_addressType_limit"));
            }
            String[] addresses = jobGroup.getAddressList().split(",");
            for (String item : addresses) {
                if (item == null || item.trim().length() == 0) {
                    return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_registryList_invalid"));
                }
            }
        }
        int ret = jobGroupMapper.save(jobGroup);
        return (ret > 0) ? ReturnT.SUCCESS : ReturnT.FAIL;
    }

    /**
     * Update an executor group. Auto-registered groups (addressType == 0) get
     * their address list rebuilt from the live registry; manually-addressed
     * groups are validated like {@link #save}.
     */
    @PostMapping("/update")
    @ApiOperation("更新执行器")
    public ReturnT<String> update(@RequestBody JobGroup jobGroup) {
        // valid
        if (jobGroup.getAppName() == null || jobGroup.getAppName().trim().length() == 0) {
            return new ReturnT<String>(500, (I18nUtil.getString("system_please_input") + "AppName"));
        }
        if (jobGroup.getAppName().length() < 4 || jobGroup.getAppName().length() > 64) {
            return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_appName_length"));
        }
        if (jobGroup.getTitle() == null || jobGroup.getTitle().trim().length() == 0) {
            return new ReturnT<String>(500, (I18nUtil.getString("system_please_input") + I18nUtil.getString("jobgroup_field_title")));
        }
        if (jobGroup.getAddressType() == 0) {
            // 0 = auto registration: rebuild the address list from live registrations;
            // stays null when nothing is registered
            List<String> registryList = findRegistryByAppName(jobGroup.getAppName());
            String addressListStr = null;
            if (registryList != null && !registryList.isEmpty()) {
                Collections.sort(registryList);
                // String.join replaces the old manual concat-then-trim loop
                addressListStr = String.join(",", registryList);
            }
            jobGroup.setAddressList(addressListStr);
        } else {
            // 1 = manual entry
            if (jobGroup.getAddressList() == null || jobGroup.getAddressList().trim().length() == 0) {
                return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_addressType_limit"));
            }
            String[] addresses = jobGroup.getAddressList().split(",");
            for (String item : addresses) {
                if (item == null || item.trim().length() == 0) {
                    return new ReturnT<String>(500, I18nUtil.getString("jobgroup_field_registryList_invalid"));
                }
            }
        }
        int ret = jobGroupMapper.update(jobGroup);
        return (ret > 0) ? ReturnT.SUCCESS : ReturnT.FAIL;
    }

    /**
     * Collect the live (non-dead) executor registry addresses grouped by
     * AppName and return the de-duplicated list for the requested AppName,
     * or null when none are registered.
     */
    private List<String> findRegistryByAppName(String appNameParam) {
        HashMap<String, List<String>> appAddressMap = new HashMap<>();
        List<JobRegistry> list = jobRegistryMapper.findAll(RegistryConfig.DEAD_TIMEOUT, new Date());
        if (list != null) {
            for (JobRegistry item : list) {
                if (RegistryConfig.RegistType.EXECUTOR.name().equals(item.getRegistryGroup())) {
                    // computeIfAbsent replaces the old get/null-check/put dance
                    List<String> registryList =
                            appAddressMap.computeIfAbsent(item.getRegistryKey(), k -> new ArrayList<>());
                    if (!registryList.contains(item.getRegistryValue())) {
                        registryList.add(item.getRegistryValue());
                    }
                }
            }
        }
        return appAddressMap.get(appNameParam);
    }

    /**
     * Remove an executor group. Refused when jobs still reference it or when
     * it is the last remaining group.
     */
    @PostMapping("/remove")
    @ApiOperation("移除执行器")
    public ReturnT<String> remove(int id) {
        // valid
        int count = jobInfoMapper.pageListCount(0, 10, id, -1, null, null, 0,null);
        if (count > 0) {
            return new ReturnT<>(500, I18nUtil.getString("jobgroup_del_limit_0"));
        }
        List<JobGroup> allList = jobGroupMapper.findAll();
        if (allList.size() == 1) {
            return new ReturnT<>(500, I18nUtil.getString("jobgroup_del_limit_1"));
        }
        int ret = jobGroupMapper.remove(id);
        return (ret > 0) ? ReturnT.SUCCESS : ReturnT.FAIL;
    }

    /** Load a single executor group by id; FAIL when absent. */
    @RequestMapping(value = "/loadById", method = RequestMethod.POST)
    @ApiOperation("根据id获取执行器")
    public ReturnT<JobGroup> loadById(int id) {
        JobGroup jobGroup = jobGroupMapper.load(id);
        return jobGroup != null ? new ReturnT<>(jobGroup) : new ReturnT<>(ReturnT.FAIL_CODE, null);
    }

    /** Search executor groups by optional AppName / title / address list. */
    @GetMapping("/query")
    @ApiOperation("查询执行器")
    public ReturnT<List<JobGroup>> get(@ApiParam(value = "执行器AppName")
                                       @RequestParam(value = "appName", required = false) String appName,
                                       @ApiParam(value = "执行器名称")
                                       @RequestParam(value = "title", required = false) String title,
                                       @ApiParam(value = "执行器地址列表")
                                       @RequestParam(value = "addressList", required = false) String addressList) {
        return new ReturnT<>(jobGroupMapper.find(appName, title, addressList));
    }
}

View File

@@ -0,0 +1,137 @@
package com.platform.admin.controller;
import com.platform.admin.base.BaseController;
import com.platform.admin.dto.FlinkXBatchJsonBuildDto;
import com.platform.admin.dto.TriggerJobDto;
import com.platform.admin.service.JobService;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.util.DateUtil;
import com.platform.admin.core.cron.CronExpression;
import com.platform.admin.core.thread.JobTriggerPoolHelper;
import com.platform.admin.core.trigger.TriggerTypeEnum;
import com.platform.admin.core.util.I18nUtil;
import com.platform.admin.entity.JobInfo;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.web.bind.annotation.*;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
*
* @author AllDataDC
* @date 2023/3/26 11:14
* 任务配置接口
**/
@Api(tags = "任务配置接口")
@RestController
@RequestMapping("/job")
public class JobInfoController extends BaseController {

    @Resource
    private JobService jobService;

    /**
     * Paged job listing. {@code current} is 1-based in the request and
     * converted to a row offset here.
     */
    @GetMapping("/pageList")
    @ApiOperation("任务列表")
    public ReturnT<Map<String, Object>> pageList(@RequestParam(value = "current", required = false, defaultValue = "0") int current,
                                                 @RequestParam(value = "size", required = false, defaultValue = "10") int size,
                                                 @RequestParam("jobGroup") int jobGroup, @RequestParam("triggerStatus") int triggerStatus,
                                                 @RequestParam("jobDesc") String jobDesc, @RequestParam("glueType") String glueType,
                                                 @RequestParam("projectIds") Integer[] projectIds) {
        return new ReturnT<>(jobService.pageList((current-1)*size, size, jobGroup, triggerStatus, jobDesc, glueType, 0, projectIds));
    }

    /** List every job (no paging). */
    @GetMapping("/list")
    @ApiOperation("全部任务列表")
    public ReturnT<List<JobInfo>> list(){
        return new ReturnT<>(jobService.list());
    }

    /** Create a job, stamping it with the current user's id. */
    @PostMapping("/add")
    @ApiOperation("添加任务")
    public ReturnT<String> add(HttpServletRequest request, @RequestBody JobInfo jobInfo) {
        jobInfo.setUserId(getCurrentUserId(request));
        return jobService.add(jobInfo);
    }

    /** Update a job, stamping it with the current user's id. */
    @PostMapping("/update")
    @ApiOperation("更新任务")
    public ReturnT<String> update(HttpServletRequest request,@RequestBody JobInfo jobInfo) {
        jobInfo.setUserId(getCurrentUserId(request));
        return jobService.update(jobInfo);
    }

    /** Delete a job by id. */
    @PostMapping(value = "/remove/{id}")
    @ApiOperation("移除任务")
    public ReturnT<String> remove(@PathVariable(value = "id") int id) {
        return jobService.remove(id);
    }

    /** Stop (pause) a job's trigger. */
    @RequestMapping(value = "/stop",method = RequestMethod.POST)
    @ApiOperation("停止任务")
    public ReturnT<String> pause(int id) {
        return jobService.stop(id);
    }

    /** Start (resume) a job's trigger. */
    @RequestMapping(value = "/start",method = RequestMethod.POST)
    @ApiOperation("开启任务")
    public ReturnT<String> start(int id) {
        return jobService.start(id);
    }

    /**
     * Manually trigger a job once via the trigger pool.
     * Returns FAIL when the trigger throws; SUCCESS otherwise.
     */
    @PostMapping(value = "/trigger")
    @ApiOperation("触发任务")
    public ReturnT<String> triggerJob(@RequestBody TriggerJobDto dto) {
        try {
            // NOTE(review): dto.getExecutorParam() was read here but never
            // forwarded — runJob(jobId) takes only the id. The dead local has
            // been removed; confirm whether the executor param should be
            // passed through the trigger pool.
            JobTriggerPoolHelper jobTriggerPoolHelper = new JobTriggerPoolHelper();
            jobTriggerPoolHelper.runJob(dto.getJobId());
        } catch (Exception e) {
            return ReturnT.FAIL;
        }
        return ReturnT.SUCCESS;
    }

    /**
     * Compute the next 5 fire times of a cron expression, formatted as
     * date-time strings; FAIL with an i18n message on an invalid expression.
     */
    @GetMapping("/nextTriggerTime")
    @ApiOperation("获取近5次触发时间")
    public ReturnT<List<String>> nextTriggerTime(String cron) {
        List<String> result = new ArrayList<>();
        try {
            CronExpression cronExpression = new CronExpression(cron);
            Date lastTime = new Date();
            for (int i = 0; i < 5; i++) {
                lastTime = cronExpression.getNextValidTimeAfter(lastTime);
                if (lastTime != null) {
                    result.add(DateUtil.formatDateTime(lastTime));
                } else {
                    // cron has no further fire times
                    break;
                }
            }
        } catch (ParseException e) {
            return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("jobinfo_field_cron_invalid"));
        }
        return new ReturnT<>(result);
    }

    /** Batch-create jobs from a template; the template id is required. */
    @PostMapping("/batchAdd")
    @ApiOperation("批量创建任务")
    public ReturnT<String> batchAdd(@RequestBody FlinkXBatchJsonBuildDto dto) throws IOException {
        if (dto.getTemplateId() ==0) {
            return new ReturnT<>(ReturnT.FAIL_CODE, (I18nUtil.getString("system_please_choose") + I18nUtil.getString("jobinfo_field_temp")));
        }
        return jobService.batchAdd(dto);
    }
}

View File

@@ -0,0 +1,194 @@
package com.platform.admin.controller;
import com.platform.admin.entity.JobLog;
import com.platform.admin.mapper.JobInfoMapper;
import com.platform.admin.mapper.JobLogMapper;
import com.platform.core.biz.ExecutorBiz;
import com.platform.core.biz.model.LogResult;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.util.DateUtil;
import com.platform.admin.core.kill.KillJob;
import com.platform.admin.core.scheduler.JobScheduler;
import com.platform.admin.core.util.I18nUtil;
import com.platform.admin.entity.JobInfo;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.*;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
*
* @author AllDataDC
* @date 2023/3/26 11:14
* 任务运行日志接口
**/
@RestController
@RequestMapping("/log")
@Api(tags = "任务运行日志接口")
public class JobLogController {
    private static Logger logger = LoggerFactory.getLogger(JobLogController.class);

    @Resource
    public JobInfoMapper jobInfoMapper;
    @Resource
    public JobLogMapper jobLogMapper;

    /**
     * Paged run-log listing. {@code filterTime} is an optional
     * "start - end" date-time range; an unparseable value is ignored.
     */
    @GetMapping("/pageList")
    @ApiOperation("运行日志列表")
    public ReturnT<Map<String, Object>> pageList(
            @RequestParam(value = "current", required = false, defaultValue = "0") int current,
            @RequestParam(value = "size", required = false, defaultValue = "10") int size,
            @RequestParam(value = "jobGroup") int jobGroup, @RequestParam(value = "jobId") int jobId,
            @RequestParam(value = "logStatus") int logStatus, @RequestParam(value = "filterTime") String filterTime) {
        // valid permission
        //JobInfoController.validPermission(request, jobGroup); // 仅管理员支持查询全部;普通用户仅支持查询有权限的 jobGroup
        // parse param: split the "start - end" range into two timestamps
        Date triggerTimeStart = null;
        Date triggerTimeEnd = null;
        if (filterTime != null && filterTime.trim().length() > 0) {
            String[] temp = filterTime.split(" - ");
            if (temp.length == 2) {
                triggerTimeStart = DateUtil.parseDateTime(temp[0]);
                triggerTimeEnd = DateUtil.parseDateTime(temp[1]);
            }
        }
        // page query
        List<JobLog> data = jobLogMapper.pageList((current - 1) * size, size, jobGroup, jobId, triggerTimeStart, triggerTimeEnd, logStatus);
        int cnt = jobLogMapper.pageListCount((current - 1) * size, size, jobGroup, jobId, triggerTimeStart, triggerTimeEnd, logStatus);
        // package result
        Map<String, Object> maps = new HashMap<>();
        maps.put("recordsTotal", cnt);      // total record count
        maps.put("recordsFiltered", cnt);   // record count after filtering
        maps.put("data", data);             // the page of rows
        return new ReturnT<>(maps);
    }

    /**
     * Read a log file and return its full content as a {@code LogResult}.
     *
     * SECURITY NOTE(review): {@code executorAddress} is used directly as a
     * local file path; an attacker-controlled value allows arbitrary-file
     * read / path traversal — this should be validated against a base log
     * directory before opening.
     */
    @RequestMapping(value = "/logDetailCat", method = RequestMethod.GET)
    @ApiOperation("运行日志详情")
    public ReturnT<LogResult> logDetailCat(HttpServletRequest request,String executorAddress) {
        // try-with-resources: the previous manual close leaked both streams
        // when read() threw mid-way
        try (InputStream in = new FileInputStream(executorAddress);
             ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
            byte[] buf = new byte[1024];
            int len;
            while ((len = in.read(buf)) != -1) {
                bos.write(buf, 0, len);
            }
            // NOTE(review): decodes with the platform default charset —
            // presumably the log files are UTF-8; confirm and pass an explicit
            // charset if so
            String logContent = new String(bos.toByteArray());
            ReturnT<LogResult> returnT = new ReturnT<>(ReturnT.SUCCESS_CODE, "查询日志成功");
            LogResult logResult = new LogResult(0, 0, logContent, true);
            returnT.setContent(logResult);
            return returnT;
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            return new ReturnT<>(ReturnT.FAIL_CODE, e.getMessage());
        }
    }

    /**
     * Kill a running job by log id. The remote-kill call is currently
     * disabled (commented out) and this endpoint only performs validation.
     *
     * TODO(review): returns null when validation passes — callers receive no
     * ReturnT at all; restore the kill flow or return an explicit result.
     */
    @RequestMapping(value = "/logKill", method = RequestMethod.POST)
    @ApiOperation("kill任务")
    public ReturnT<String> logKill(int id) {
        // base check
        JobLog log = jobLogMapper.load(id);
        JobInfo jobInfo = jobInfoMapper.loadById(log.getJobId());
        if (jobInfo == null) {
            return new ReturnT<>(500, I18nUtil.getString("jobinfo_glue_jobid_invalid"));
        }
        if (ReturnT.SUCCESS_CODE != log.getTriggerCode()) {
            return new ReturnT<>(500, I18nUtil.getString("joblog_kill_log_limit"));
        }
        // request of kill — disabled; the empty try/catch around the commented
        // code has been removed as dead code
        //            ExecutorBiz executorBiz = JobScheduler.getExecutorBiz(log.getExecutorAddress());
        //            runResult = executorBiz.kill(jobInfo.getId());
        //        if (ReturnT.SUCCESS_CODE == runResult.getCode()) {
        //            log.setHandleCode(ReturnT.FAIL_CODE);
        //            log.setHandleMsg(I18nUtil.getString("joblog_kill_log_byman") + ":" + (runResult.getMsg() != null ? runResult.getMsg() : ""));
        //            log.setHandleTime(new Date());
        //            jobLogMapper.updateHandleInfo(log);
        //            return new ReturnT<>(runResult.getMsg());
        //        } else {
        //            return new ReturnT<>(500, runResult.getMsg());
        //        }
        return null;
    }

    /**
     * Clear run logs by age (types 1-4: months/years back) or by count
     * (types 5-9: keep only the newest N); deletes in batches of 1000.
     */
    @PostMapping("/clearLog")
    @ApiOperation("清理日志")
    public ReturnT<String> clearLog(int jobGroup, int jobId, int type) {
        Date clearBeforeTime = null;
        int clearBeforeNum = 0;
        if (type == 1) {
            clearBeforeTime = DateUtil.addMonths(new Date(), -1);   // logs older than one month
        } else if (type == 2) {
            clearBeforeTime = DateUtil.addMonths(new Date(), -3);   // logs older than three months
        } else if (type == 3) {
            clearBeforeTime = DateUtil.addMonths(new Date(), -6);   // logs older than six months
        } else if (type == 4) {
            clearBeforeTime = DateUtil.addYears(new Date(), -1);    // logs older than one year
        } else if (type == 5) {
            clearBeforeNum = 1000;      // keep only the newest 1,000
        } else if (type == 6) {
            clearBeforeNum = 10000;     // keep only the newest 10,000
        } else if (type == 7) {
            clearBeforeNum = 30000;     // keep only the newest 30,000
        } else if (type == 8) {
            clearBeforeNum = 100000;    // keep only the newest 100,000
        } else if (type == 9) {
            clearBeforeNum = 0;         // clear everything
        } else {
            return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("joblog_clean_type_invalid"));
        }
        // delete in 1000-id batches until nothing matches
        List<Long> logIds;
        do {
            logIds = jobLogMapper.findClearLogIds(jobGroup, jobId, clearBeforeTime, clearBeforeNum, 1000);
            if (logIds != null && logIds.size() > 0) {
                jobLogMapper.clearLog(logIds);
            }
        } while (logIds != null && logIds.size() > 0);
        return ReturnT.SUCCESS;
    }

    /**
     * Stop a running job process by the process id recorded on its log row.
     */
    @ApiOperation("停止该job作业")
    @PostMapping("/killJob")
    public ReturnT<String> killJob(@RequestBody JobLog log) {
        //@TODO 停掉作业
        String processId = log.getProcessId();
        return KillJob.trigger(processId);
    }
}

View File

@@ -0,0 +1,115 @@
package com.platform.admin.controller;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.api.R;
import com.platform.admin.base.BaseController;
import com.platform.admin.service.JobProjectService;
import com.platform.admin.entity.JobProject;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletRequest;
import java.io.Serializable;
import java.util.Date;
import java.util.List;
/**
*
* @author AllDataDC
* @date 2023/3/26 11:14
* 项目管理模块
**/
@RestController
@RequestMapping("/jobProject")
@Api(tags = "项目管理模块")
public class JobProjectController extends BaseController {

    @Autowired
    private JobProjectService jobProjectService;

    /**
     * Paged project listing with an optional name search.
     *
     * @return one page of projects
     */
    @GetMapping
    @ApiOperation("分页查询所有数据")
    public R<IPage<JobProject>> selectAll(@RequestParam(value = "searchVal", required = false) String searchVal,
                                          @RequestParam("pageSize") Integer pageSize,
                                          @RequestParam("pageNo") Integer pageNo) {
        return success(jobProjectService.getProjectListPaging(pageSize, pageNo, searchVal));
    }

    /**
     * List every active (flag = true) project.
     */
    @ApiOperation("获取所有数据")
    @GetMapping("/list")
    public R<List<JobProject>> selectList() {
        // diamond instead of the raw QueryWrapper used previously
        QueryWrapper<JobProject> query = new QueryWrapper<>();
        query.eq("flag", true);
        return success(jobProjectService.list(query));
    }

    /**
     * Fetch a single project by primary key.
     *
     * @param id 主键
     */
    @ApiOperation("通过主键查询单条数据")
    @GetMapping("{id}")
    public R<JobProject> selectOne(@PathVariable Serializable id) {
        return success(this.jobProjectService.getById(id));
    }

    /**
     * Create a project, stamping it with the current user and creation time.
     *
     * @param entity 实体对象
     */
    @ApiOperation("新增数据")
    @PostMapping
    public R<Boolean> insert(HttpServletRequest request, @RequestBody JobProject entity) {
        entity.setUserId(getCurrentUserId(request));
        entity.setCreateTime(new Date());
        return success(this.jobProjectService.save(entity));
    }

    /**
     * Update a project's name and description on the stored row.
     *
     * @param entity 实体对象 carrying id, name and description
     */
    @PutMapping
    @ApiOperation("修改数据")
    public R<Boolean> update(@RequestBody JobProject entity) {
        JobProject project = jobProjectService.getById(entity.getId());
        project.setName(entity.getName());
        project.setDescription(entity.getDescription());
        project.setUpdateTime(new Date());
        // fix: persist the loaded-and-modified row — previously the modified
        // copy was discarded and updateById(entity) wrote the raw request
        // object, so updateTime was never saved
        return success(this.jobProjectService.updateById(project));
    }

    /**
     * Delete projects by id list.
     *
     * @param idList 主键结合
     */
    @DeleteMapping
    @ApiOperation("删除数据")
    public R<Boolean> delete(@RequestParam("idList") List<Long> idList) {
        return success(this.jobProjectService.removeByIds(idList));
    }
}

View File

@@ -0,0 +1,95 @@
package com.platform.admin.controller;
import cn.hutool.core.util.StrUtil;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.api.R;
import com.platform.admin.base.BaseController;
import com.platform.admin.base.BaseForm;
import com.platform.admin.service.JobRegistryService;
import com.platform.admin.entity.JobRegistry;
import com.platform.admin.util.PageUtils;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.Map;
/**
*
* @author AllDataDC
* @date 2023/3/26 11:14
* 执行器资源监控
**/
@RestController
@RequestMapping("/jobRegistry")
@Api(tags = "执行器资源监控")
public class JobRegistryController extends BaseController {

    @Autowired
    private JobRegistryService jobRegistryService;

    /**
     * Paged listing of executor registry entries; paging and sorting
     * parameters are read from the current request via {@link BaseForm}.
     *
     * @return one page of registry rows
     */
    @GetMapping
    @ApiOperation("分页查询所有数据")
    @ApiImplicitParams(
            {@ApiImplicitParam(paramType = "query", dataType = "String", name = "current", value = "当前页", defaultValue = "1", required = true),
                    @ApiImplicitParam(paramType = "query", dataType = "String", name = "size", value = "一页大小", defaultValue = "10", required = true),
                    @ApiImplicitParam(paramType = "query", dataType = "Boolean", name = "ifCount", value = "是否查询总数", defaultValue = "true"),
                    @ApiImplicitParam(paramType = "query", dataType = "String", name = "ascs", value = "升序字段,多个用逗号分隔"),
                    @ApiImplicitParam(paramType = "query", dataType = "String", name = "descs", value = "降序字段,多个用逗号分隔")
            })
    public R<IPage<JobRegistry>> selectAll() {
        BaseForm baseForm = new BaseForm();
        QueryWrapper<JobRegistry> wrapper = pageQueryWrapperCustom(baseForm.getParameters());
        return success(this.jobRegistryService.page(baseForm.getPlusPagingQueryEntity(), wrapper));
    }

    /**
     * Assemble a QueryWrapper from the request parameters: "ascs"/"descs"
     * entries become order-by clauses on the named (underscored) columns,
     * and every remaining column parameter becomes an equality condition —
     * except "datasourceName", which is matched with LIKE.
     *
     * @param map raw request parameters
     * @return the assembled wrapper
     */
    protected QueryWrapper<JobRegistry> pageQueryWrapperCustom(Map<String, Object> map) {
        // split mybatis-plus paging/sorting params from column filters
        Map<String, Object> sortParams = PageUtils.filterPageParams(map);
        // column filters with empty values already removed
        Map<String, Object> columnParams = PageUtils.filterColumnQueryParams(map);
        QueryWrapper<JobRegistry> wrapper = new QueryWrapper<>();
        // ordering
        for (Map.Entry<String, Object> entry : sortParams.entrySet()) {
            String column = StrUtil.toUnderlineCase(StrUtil.toString(entry.getValue()));
            if ("ascs".equals(entry.getKey())) {
                wrapper.orderByAsc(column);
            } else if ("descs".equals(entry.getKey())) {
                wrapper.orderByDesc(column);
            }
        }
        // per-column conditions
        for (Map.Entry<String, Object> entry : columnParams.entrySet()) {
            String column = StrUtil.toUnderlineCase(entry.getKey());
            if ("datasourceName".equals(entry.getKey())) {
                wrapper.like(column, entry.getValue());
            } else {
                wrapper.eq(column, entry.getValue());
            }
        }
        return wrapper;
    }
}

View File

@@ -0,0 +1,89 @@
package com.platform.admin.controller;
import com.platform.admin.base.BaseController;
import com.platform.admin.entity.JobTemplate;
import com.platform.admin.service.JobTemplateService;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.util.DateUtil;
import com.platform.admin.core.cron.CronExpression;
import com.platform.admin.core.util.I18nUtil;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.web.bind.annotation.*;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
*
* @author AllDataDC
* @date 2023/3/26 11:14
* 任务模板
**/
@RestController
@RequestMapping("/jobTemplate")
@Api(tags = "任务配置接口")
public class JobTemplateController extends BaseController {

    @Resource
    private JobTemplateService jobTemplateService;

    /**
     * Paged template listing. {@code current} is 1-based in the request and
     * converted to a row offset before delegating to the service.
     */
    @GetMapping("/pageList")
    @ApiOperation("任务模板列表")
    public ReturnT<Map<String, Object>> pageList(@RequestParam(value = "current", required = false, defaultValue = "0") int current,
                                                 @RequestParam(value = "size", required = false, defaultValue = "10") int size,
                                                 @RequestParam(value = "jobGroup") int jobGroup, @RequestParam(value = "jobDesc") String jobDesc,
                                                 @RequestParam(value = "executorHandler") String executorHandler,
                                                 @RequestParam(value = "userId") int userId, @RequestParam(value = "projectIds",required = false) Integer[] projectIds) {
        int offset = (current - 1) * size;
        Map<String, Object> page = jobTemplateService.pageList(offset, size, jobGroup, jobDesc, executorHandler, userId, projectIds);
        return new ReturnT<>(page);
    }

    /** Create a template, stamping it with the current user's id. */
    @PostMapping("/add")
    @ApiOperation("添加任务模板")
    public ReturnT<String> add(HttpServletRequest request, @RequestBody JobTemplate jobTemplate) {
        jobTemplate.setUserId(getCurrentUserId(request));
        return jobTemplateService.add(jobTemplate);
    }

    /** Update a template, stamping it with the current user's id. */
    @PostMapping("/update")
    @ApiOperation("更新任务")
    public ReturnT<String> update(HttpServletRequest request,@RequestBody JobTemplate jobTemplate) {
        jobTemplate.setUserId(getCurrentUserId(request));
        return jobTemplateService.update(jobTemplate);
    }

    /** Delete a template by id. */
    @PostMapping(value = "/remove/{id}")
    @ApiOperation("移除任务模板")
    public ReturnT<String> remove(@PathVariable(value = "id") int id) {
        return jobTemplateService.remove(id);
    }

    /**
     * Compute the next 5 fire times of a cron expression, formatted as
     * date-time strings; FAIL with an i18n message on an invalid expression.
     */
    @GetMapping("/nextTriggerTime")
    @ApiOperation("获取近5次触发时间")
    public ReturnT<List<String>> nextTriggerTime(String cron) {
        List<String> fireTimes = new ArrayList<>();
        try {
            CronExpression expression = new CronExpression(cron);
            Date cursor = new Date();
            int remaining = 5;
            while (remaining-- > 0) {
                cursor = expression.getNextValidTimeAfter(cursor);
                if (cursor == null) {
                    // cron has no further fire times
                    break;
                }
                fireTimes.add(DateUtil.formatDateTime(cursor));
            }
        } catch (ParseException e) {
            return new ReturnT<>(ReturnT.FAIL_CODE, I18nUtil.getString("jobinfo_field_cron_invalid"));
        }
        return new ReturnT<>(fireTimes);
    }
}

View File

@@ -0,0 +1,103 @@
package com.platform.admin.controller;
import com.baomidou.mybatisplus.extension.api.R;
import com.platform.admin.base.BaseController;
import com.platform.admin.service.DatasourceQueryService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
/**
*
* @author AllDataDC
* @date 2023/3/26 11:14
* 查询数据库表名,字段的控制器
**/
@RestController
@RequestMapping("/metadata")
@Api(tags = "jdbc数据库查询控制器")
public class MetadataController extends BaseController {

    @Autowired
    private DatasourceQueryService datasourceQueryService;

    /**
     * List database names for the given datasource id.
     *
     * @param datasourceId datasource primary key
     */
    @GetMapping("/getDBs")
    @ApiOperation("根据数据源id获取mongo库名")
    public R<List<String>> getDBs(Long datasourceId) throws IOException {
        List<String> databases = datasourceQueryService.getDBs(datasourceId);
        return success(databases);
    }

    /**
     * List collection names for the given datasource id and database name.
     *
     * @param datasourceId datasource primary key
     * @param dbName       database to inspect
     */
    @GetMapping("/collectionNames")
    @ApiOperation("根据数据源id,dbname获取CollectionNames")
    public R<List<String>> getCollectionNames(Long datasourceId,String dbName) throws IOException {
        List<String> collections = datasourceQueryService.getCollectionNames(datasourceId, dbName);
        return success(collections);
    }

    /**
     * List schema names for the given datasource id.
     *
     * @param datasourceId datasource primary key
     */
    @GetMapping("/getDBSchema")
    @ApiOperation("根据数据源id获取 db schema")
    public R<List<String>> getTableSchema(Long datasourceId) {
        List<String> schemas = datasourceQueryService.getTableSchema(datasourceId);
        return success(schemas);
    }

    /**
     * List table names for the given datasource id and schema.
     *
     * @param datasourceId datasource primary key
     * @param tableSchema  schema to inspect
     */
    @GetMapping("/getTables")
    @ApiOperation("根据数据源id获取可用表名")
    public R<List<String>> getTableNames(Long datasourceId,String tableSchema) throws IOException {
        List<String> tables = datasourceQueryService.getTables(datasourceId, tableSchema);
        return success(tables);
    }

    /**
     * List column names of a table for the given datasource id.
     *
     * @param datasourceId datasource primary key
     * @param tableName    table to inspect
     */
    @GetMapping("/getColumns")
    @ApiOperation("根据数据源id和表名获取所有字段")
    public R<List<String>> getColumns(Long datasourceId, String tableName) throws IOException {
        List<String> columns = datasourceQueryService.getColumns(datasourceId, tableName);
        return success(columns);
    }

    /**
     * List the result-set column names of an arbitrary query for the given
     * datasource id.
     *
     * @param datasourceId datasource primary key
     * @param querySql     SQL whose projection is inspected
     */
    @GetMapping("/getColumnsByQuerySql")
    @ApiOperation("根据数据源id和sql语句获取所有字段")
    public R<List<String>> getColumnsByQuerySql(Long datasourceId, String querySql) throws SQLException {
        List<String> columns = datasourceQueryService.getColumnsByQuerySql(datasourceId, querySql);
        return success(columns);
    }
}

View File

@@ -0,0 +1,152 @@
package com.platform.admin.controller;
import cn.hutool.core.util.StrUtil;
import com.platform.core.biz.model.ReturnT;
import com.platform.admin.mapper.JobUserMapper;
import com.platform.admin.core.util.I18nUtil;
import com.platform.admin.entity.JobUser;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.*;
import javax.annotation.Resource;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static com.platform.core.biz.model.ReturnT.FAIL_CODE;
/**
 * User management endpoints: paging, lookup, create, update, delete and
 * password change.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 */
@RestController
@RequestMapping("/user")
@Api(tags = "用户信息接口")
public class UserController {

    @Resource
    private JobUserMapper jobUserMapper;
    @Resource
    private BCryptPasswordEncoder bCryptPasswordEncoder;

    /**
     * Paged user listing.
     *
     * @param current  1-based page number
     * @param size     page size
     * @param username optional username filter
     * @return map with total counts and the page of users
     */
    @GetMapping("/pageList")
    @ApiOperation("用户列表")
    public ReturnT<Map<String, Object>> pageList(@RequestParam(value = "current", required = false, defaultValue = "1") int current,
                                                 @RequestParam(value = "size", required = false, defaultValue = "10") int size,
                                                 @RequestParam(value = "username", required = false) String username) {
        // page list
        List<JobUser> list = jobUserMapper.pageList((current - 1) * size, size, username);
        int recordsTotal = jobUserMapper.pageListCount((current - 1) * size, size, username);
        // package result
        Map<String, Object> maps = new HashMap<>();
        maps.put("recordsTotal", recordsTotal); // 总记录数
        maps.put("recordsFiltered", recordsTotal); // 过滤后的总记录数
        maps.put("data", list); // 分页列表
        return new ReturnT<>(maps);
    }

    /**
     * Unpaged user listing, optionally filtered by username.
     */
    @GetMapping("/list")
    @ApiOperation("用户列表")
    public ReturnT<List<JobUser>> list(String username) {
        List<JobUser> list = jobUserMapper.findAll(username);
        return new ReturnT<>(list);
    }

    /** Look a user up by primary key. */
    @GetMapping("/getUserById")
    @ApiOperation(value = "根据id获取用户")
    public ReturnT<JobUser> selectById(@RequestParam("userId") Integer userId) {
        return new ReturnT<>(jobUserMapper.getUserById(userId));
    }

    /**
     * Create a user. Username and password must be 4-20 characters; the
     * password is bcrypt-encoded before persisting.
     */
    @PostMapping("/add")
    @ApiOperation("添加用户")
    public ReturnT<String> add(@RequestBody JobUser jobUser) {
        // valid username
        if (!StringUtils.hasText(jobUser.getUsername())) {
            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_please_input") + I18nUtil.getString("user_username"));
        }
        jobUser.setUsername(jobUser.getUsername().trim());
        if (!(jobUser.getUsername().length() >= 4 && jobUser.getUsername().length() <= 20)) {
            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_length_limit") + "[4-20]");
        }
        // valid password
        if (!StringUtils.hasText(jobUser.getPassword())) {
            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_please_input") + I18nUtil.getString("user_password"));
        }
        jobUser.setPassword(jobUser.getPassword().trim());
        if (!(jobUser.getPassword().length() >= 4 && jobUser.getPassword().length() <= 20)) {
            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_length_limit") + "[4-20]");
        }
        // Check for a duplicate BEFORE encoding, so a rejected request does not
        // waste a bcrypt round (previously the password was encoded first).
        JobUser existUser = jobUserMapper.loadByUserName(jobUser.getUsername());
        if (existUser != null) {
            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("user_username_repeat"));
        }
        jobUser.setPassword(bCryptPasswordEncoder.encode(jobUser.getPassword()));
        // write
        jobUserMapper.save(jobUser);
        return ReturnT.SUCCESS;
    }

    /**
     * Update a user. A non-blank 4-20 character password is required and is
     * re-encoded with bcrypt.
     */
    @PostMapping(value = "/update")
    @ApiOperation("更新用户信息")
    public ReturnT<String> update(@RequestBody JobUser jobUser) {
        if (!StringUtils.hasText(jobUser.getPassword())) {
            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_no_blank") + "密码");
        }
        // hasText guarantees at least one non-whitespace char, so the trimmed
        // value is non-blank here (a redundant blank re-check was removed).
        String pwd = jobUser.getPassword().trim();
        if (!(pwd.length() >= 4 && pwd.length() <= 20)) {
            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_length_limit") + "[4-20]");
        }
        jobUser.setPassword(bCryptPasswordEncoder.encode(pwd));
        // write
        jobUserMapper.update(jobUser);
        return ReturnT.SUCCESS;
    }

    /** Delete a user by id; FAIL when no row was affected. */
    @RequestMapping(value = "/remove", method = RequestMethod.POST)
    @ApiOperation("删除用户")
    public ReturnT<String> remove(int id) {
        int result = jobUserMapper.delete(id);
        return result != 1 ? ReturnT.FAIL : ReturnT.SUCCESS;
    }

    /**
     * Change a user's password (looked up by username).
     */
    @PostMapping(value = "/updatePwd")
    @ApiOperation("修改密码")
    public ReturnT<String> updatePwd(@RequestBody JobUser jobUser) {
        String password = jobUser.getPassword();
        if (password == null || password.trim().length() == 0) {
            return new ReturnT<>(ReturnT.FAIL.getCode(), "密码不可为空");
        }
        password = password.trim();
        if (!(password.length() >= 4 && password.length() <= 20)) {
            return new ReturnT<>(FAIL_CODE, I18nUtil.getString("system_length_limit") + "[4-20]");
        }
        // do write
        JobUser existUser = jobUserMapper.loadByUserName(jobUser.getUsername());
        if (existUser == null) {
            // Fix: an unknown username previously caused a NullPointerException here.
            return new ReturnT<>(FAIL_CODE, "用户不存在");
        }
        existUser.setPassword(bCryptPasswordEncoder.encode(password));
        jobUserMapper.update(existUser);
        return ReturnT.SUCCESS;
    }
}

View File

@@ -0,0 +1,143 @@
package com.platform.admin.core.conf;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
/**
 * Executor-side configuration holder. Exposes the chunjun (flinkx) and datax
 * install/json/log locations plus the DTS database connection settings, and
 * publishes itself through a static handle once Spring has injected the
 * properties. (The historical "Excecutor" spelling is kept because callers
 * reference the class by this name.)
 */
@Component
public class ExcecutorConfig implements InitializingBean, DisposableBean {

    // -------- static singleton handle --------
    private static ExcecutorConfig excecutorConfig = null;

    // -------- chunjun (flinkx) locations --------
    @Value("${dts.executor.chunjunHome}")
    private String flinkxHome;
    @Value("${dts.executor.chunjunjsonPath}")
    private String flinkxjsonPath;
    @Value("${dts.executor.chunjunlogHome}")
    private String flinkxlogHome;

    // -------- datax locations --------
    @Value("${dts.executor.dataxHome}")
    private String dataxHome;
    @Value("${dts.executor.dataxjsonPath}")
    private String dataxjsonPath;
    @Value("${dts.executor.dataxlogHome}")
    private String dataxlogHome;

    // -------- DTS database connection --------
    @Value("${common.mysql.dts.url}")
    private String url;
    @Value("${common.mysql.dts.driver-class-name}")
    private String driverClassname;
    @Value("${common.mysql.dts.username}")
    private String username;
    @Value("${common.mysql.dts.password}")
    private String password;

    /** @return the singleton instance published after Spring wiring completes */
    public static ExcecutorConfig getExcecutorConfig() {
        return excecutorConfig;
    }

    public static void setExcecutorConfig(ExcecutorConfig excecutorConfig) {
        ExcecutorConfig.excecutorConfig = excecutorConfig;
    }

    /** Publish this bean through the static handle once properties are injected. */
    @Override
    public void afterPropertiesSet() throws Exception {
        excecutorConfig = this;
    }

    /** Nothing to release on shutdown. */
    @Override
    public void destroy() throws Exception {
    }

    public String getFlinkxHome() {
        return flinkxHome;
    }

    public void setFlinkxHome(String flinkxHome) {
        this.flinkxHome = flinkxHome;
    }

    public String getFlinkxjsonPath() {
        return flinkxjsonPath;
    }

    public void setFlinkxjsonPath(String flinkxjsonPath) {
        this.flinkxjsonPath = flinkxjsonPath;
    }

    public String getFlinkxlogHome() {
        return flinkxlogHome;
    }

    public void setFlinkxlogHome(String flinkxlogHome) {
        this.flinkxlogHome = flinkxlogHome;
    }

    public String getDataxHome() {
        return dataxHome;
    }

    public void setDataxHome(String dataxHome) {
        this.dataxHome = dataxHome;
    }

    public String getDataxjsonPath() {
        return dataxjsonPath;
    }

    public void setDataxjsonPath(String dataxjsonPath) {
        this.dataxjsonPath = dataxjsonPath;
    }

    public String getDataxlogHome() {
        return dataxlogHome;
    }

    public void setDataxlogHome(String dataxlogHome) {
        this.dataxlogHome = dataxlogHome;
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public String getDriverClassname() {
        return driverClassname;
    }

    public void setDriverClassname(String driverClassname) {
        this.driverClassname = driverClassname;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }
}

View File

@@ -0,0 +1,161 @@
package com.platform.admin.core.conf;
import com.platform.admin.mapper.*;
import com.platform.admin.core.scheduler.JobScheduler;
import com.platform.admin.mapper.JobDatasourceMapper;
import com.platform.admin.mapper.JobGroupMapper;
import com.platform.admin.mapper.JobInfoMapper;
import com.platform.admin.mapper.JobLogMapper;
import com.platform.admin.mapper.JobLogReportMapper;
import com.platform.admin.mapper.JobRegistryMapper;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.mail.javamail.JavaMailSender;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import javax.sql.DataSource;
/**
 * Global admin configuration for the scheduler ("xxl-job" style).
 *
 * <p>Acts both as a Spring-managed property/dependency holder and as a static
 * singleton: {@link #getAdminConfig()} lets non-Spring scheduler code reach the
 * wired mappers, mail sender and settings. It also drives the scheduler
 * lifecycle: the embedded {@code JobScheduler} is started after property
 * injection and stopped on context shutdown.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 */
@Component
public class JobAdminConfig implements InitializingBean, DisposableBean {

    // Static handle; assigned in afterPropertiesSet(), null until then.
    private static JobAdminConfig adminConfig = null;

    public static JobAdminConfig getAdminConfig() {
        return adminConfig;
    }

    // ---------------------- XxlJobScheduler ----------------------
    private JobScheduler xxlJobScheduler;

    @Override
    public void afterPropertiesSet() throws Exception {
        // The static handle is published BEFORE the scheduler starts —
        // presumably scheduler threads read getAdminConfig(); keep this order.
        adminConfig = this;
        xxlJobScheduler = new JobScheduler();
        xxlJobScheduler.init();
    }

    @Override
    public void destroy() throws Exception {
        xxlJobScheduler.destroy();
    }
    // ---------------------- XxlJobScheduler ----------------------

    // conf
    // Locale key (presumably consumed by I18nUtil — confirm).
    @Value("${dts.job.i18n}")
    private String i18n;
    // Access token for admin/executor calls — TODO confirm usage at call sites.
    @Value("${dts.job.accessToken}")
    private String accessToken;
    // Sender account for outgoing mail.
    @Value("${spring.mail.username}")
    private String emailUserName;
    @Value("${dts.job.triggerpool.fast.max}")
    private int triggerPoolFastMax;
    @Value("${dts.job.triggerpool.slow.max}")
    private int triggerPoolSlowMax;
    @Value("${dts.job.logretentiondays}")
    private int logretentiondays;
    // AES key, presumably for datasource credential encryption — confirm.
    @Value("${datasource.aes.key}")
    private String dataSourceAESKey;

    // dao, service
    @Resource
    private JobLogMapper jobLogMapper;
    @Resource
    private JobInfoMapper jobInfoMapper;
    @Resource
    private JobRegistryMapper jobRegistryMapper;
    @Resource
    private JobGroupMapper jobGroupMapper;
    @Resource
    private JobLogReportMapper jobLogReportMapper;
    @Resource
    private JavaMailSender mailSender;
    @Resource
    private DataSource dataSource;
    @Resource
    private JobDatasourceMapper jobDatasourceMapper;

    public String getI18n() {
        return i18n;
    }

    public String getAccessToken() {
        return accessToken;
    }

    public String getEmailUserName() {
        return emailUserName;
    }

    // Enforces a floor of 200 threads for the fast trigger pool.
    public int getTriggerPoolFastMax() {
        return triggerPoolFastMax < 200 ? 200 : triggerPoolFastMax;
    }

    // Enforces a floor of 100 threads for the slow trigger pool.
    public int getTriggerPoolSlowMax() {
        return triggerPoolSlowMax < 100 ? 100 : triggerPoolSlowMax;
    }

    // Configured values below 7 days collapse to -1 (interpretation of -1 is
    // decided by the cleanup thread — presumably "never clean"; confirm).
    public int getLogretentiondays() {
        return logretentiondays < 7 ? -1 : logretentiondays;
    }

    public JobLogMapper getJobLogMapper() {
        return jobLogMapper;
    }

    public JobInfoMapper getJobInfoMapper() {
        return jobInfoMapper;
    }

    public JobRegistryMapper getJobRegistryMapper() {
        return jobRegistryMapper;
    }

    public JobGroupMapper getJobGroupMapper() {
        return jobGroupMapper;
    }

    public JobLogReportMapper getJobLogReportMapper() {
        return jobLogReportMapper;
    }

    public JavaMailSender getMailSender() {
        return mailSender;
    }

    public DataSource getDataSource() {
        return dataSource;
    }

    public JobDatasourceMapper getJobDatasourceMapper() {
        return jobDatasourceMapper;
    }

    public String getDataSourceAESKey() {
        return dataSourceAESKey;
    }

    public void setDataSourceAESKey(String dataSourceAESKey) {
        this.dataSourceAESKey = dataSourceAESKey;
    }
}

View File

@@ -0,0 +1,52 @@
package com.platform.admin.core.conf;
import com.platform.core.executor.impl.JobSpringExecutor;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
 * @ClassName XxlJobConfig
 * @Description: xxl-job依赖配置 — builds the executor bean from
 * {@code xxl.job.*} properties.
 * @author AllDataDC
 */
@Configuration
@Data
@Slf4j
public class XxlJobConfig {

    /** Admin server address(es) the executor registers with. */
    @Value("${xxl.job.admin.addresses}")
    private String adminAddresses;

    /** Executor application name used for registration. */
    @Value("${xxl.job.executor.appname}")
    private String appname;

    /** Port the embedded executor server listens on. */
    @Value("${xxl.job.executor.port}")
    private int port;

    /** Directory for executor job logs. */
    @Value("${xxl.job.executor.logpath}")
    private String logPath;

    /** Days to keep executor job logs. */
    @Value("${xxl.job.executor.logretentiondays}")
    private int logRetentionDays;

    /**
     * Build and configure the executor bean.
     * (address/ip/accessToken are intentionally left unset — see the
     * commented lines below.)
     */
    @Bean
    public JobSpringExecutor xxlJobExecutor() {
        // Fix: use the @Slf4j logger instead of System.out for consistency.
        log.info("=============== xxl-job config init.===============");
        JobSpringExecutor xxlJobSpringExecutor = new JobSpringExecutor();
        xxlJobSpringExecutor.setAdminAddresses(adminAddresses);
        xxlJobSpringExecutor.setAppName(appname);
        // xxlJobSpringExecutor.setAddress(address);
        // xxlJobSpringExecutor.setIp(ip);
        xxlJobSpringExecutor.setPort(port);
        // xxlJobSpringExecutor.setAccessToken(accessToken);
        xxlJobSpringExecutor.setLogPath(logPath);
        xxlJobSpringExecutor.setLogRetentionDays(logRetentionDays);
        return xxlJobSpringExecutor;
    }
}

View File

@@ -0,0 +1,51 @@
package com.platform.admin.core.handler;
import com.platform.admin.util.AESUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.ibatis.type.BaseTypeHandler;
import org.apache.ibatis.type.JdbcType;
import org.apache.ibatis.type.MappedTypes;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
 * MyBatis type handler that transparently AES-encrypts String parameters on
 * write and decrypts String columns on read.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 */
@MappedTypes({String.class})
public class AESEncryptHandler extends BaseTypeHandler<String> {

    /** Encrypt non-blank values before binding; blank values are stored as NULL. */
    @Override
    public void setNonNullParameter(PreparedStatement ps, int i, String parameter, JdbcType jdbcType) throws SQLException {
        String stored = StringUtils.isNotBlank(parameter) ? AESUtil.encrypt(parameter) : null;
        ps.setString(i, stored);
    }

    /** Decrypt a column read by name. */
    @Override
    public String getNullableResult(ResultSet rs, String columnName) throws SQLException {
        return AESUtil.decrypt(rs.getString(columnName));
    }

    /** Decrypt a column read by index. */
    @Override
    public String getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
        return AESUtil.decrypt(rs.getString(columnIndex));
    }

    /** Decrypt a stored-procedure output parameter. */
    @Override
    public String getNullableResult(CallableStatement cs, int columnIndex)
            throws SQLException {
        return AESUtil.decrypt(cs.getString(columnIndex));
    }
}

View File

@@ -0,0 +1,29 @@
package com.platform.admin.core.handler;
import com.platform.admin.core.thread.JobTriggerPoolHelper;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.core.handler.IJobHandler;
import com.platform.core.handler.annotation.JobHandler;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
/**
 * @ClassName DataxJobHandler
 * @Description: executorJobHandler — scheduler entry point that hands a
 * Datax job off to the trigger pool.
 * @author AllDataDC
 */
@Slf4j
@Component
@JobHandler("executorJobHandler")
public class DataxJobHandler extends IJobHandler {

    /**
     * Execute a scheduled Datax job: only the job id from the trigger
     * parameters is used; the heavy lifting runs in the trigger pool.
     *
     * @param tgParam trigger parameters supplied by the scheduler
     * @return SUCCESS once the job has been submitted
     */
    @Override
    public ReturnT<String> execute(TriggerParam tgParam) throws Exception {
        log.info("---------Datax定时任务开始执行--------");
        // 数据抽取具体的执行方法
        JobTriggerPoolHelper.runJob(tgParam.getJobId());
        // Fix: was System.out.println — use the @Slf4j logger consistently.
        log.info("---------Datax定时任务执行成功--------");
        return ReturnT.SUCCESS;
    }
}

View File

@@ -0,0 +1,37 @@
package com.platform.admin.core.handler;
import com.baomidou.mybatisplus.core.handlers.MetaObjectHandler;
import lombok.extern.slf4j.Slf4j;
import org.apache.ibatis.reflection.MetaObject;
import org.springframework.security.core.context.SecurityContext;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Component;
import java.util.Date;
/**
 * Auto-fills common audit columns (createBy/createDate, updateBy/updateDate)
 * on MyBatis-Plus inserts and updates.
 *
 * @author AllDataDC
 * @date 2023/3/26 11:14
 */
@Component
@Slf4j
public class MybatisMetaObjectHandler implements MetaObjectHandler {

    /** Fill creation audit fields on insert. */
    @Override
    public void insertFill(MetaObject metaObject) {
        setFieldValByName("createDate", new Date(), metaObject);
        setFieldValByName("createBy", getCurrentUser(), metaObject);
    }

    /** Fill modification audit fields on update. */
    @Override
    public void updateFill(MetaObject metaObject) {
        setFieldValByName("updateDate", new Date(), metaObject);
        setFieldValByName("updateBy", getCurrentUser(), metaObject);
    }

    /**
     * Resolve the current principal from the security context.
     * Fix: outside an authenticated request (e.g. background/scheduled
     * threads) there is no Authentication, which previously caused a
     * NullPointerException during the fill; return null instead.
     */
    private String getCurrentUser() {
        SecurityContext context = SecurityContextHolder.getContext();
        if (context.getAuthentication() == null) {
            return null;
        }
        return String.valueOf(context.getAuthentication().getPrincipal());
    }
}

View File

@@ -0,0 +1,43 @@
package com.platform.admin.core.kill;
import com.platform.admin.core.thread.JobTriggerPoolHelper;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.core.enums.ExecutorBlockStrategyEnum;
import com.platform.core.glue.GlueTypeEnum;
import com.platform.admin.core.trigger.JobTrigger;
import com.platform.core.util.Constants;
import com.platform.core.util.ProcessUtil;
import java.util.Date;
/**
 * Stops a running transfer job by killing its OS process.
 */
public class KillJob {

    /**
     * Issue the platform-specific kill command for the given process.
     *
     * <p>NOTE(review): the command line is built by string concatenation and
     * run through {@code Runtime.exec}; {@code processId} must only ever come
     * from trusted, internally generated data.
     *
     * @param processId OS process id of the running job
     * @return SUCCESS when the kill command was issued; FAIL carrying the
     *         error message otherwise
     */
    public static ReturnT<String> trigger(String processId) {
        try {
            // Pick the platform-specific kill command prefix.
            String cmdstr;
            if (JobTriggerPoolHelper.isWindows()) {
                cmdstr = Constants.CMDWINDOWTASKKILL + processId;
            } else {
                cmdstr = Constants.CMDLINUXTASKKILL + processId;
            }
            // The spawned process handle is not needed afterwards (a previously
            // unused local that captured its pid was removed).
            Runtime.getRuntime().exec(cmdstr);
            return new ReturnT<>(ReturnT.SUCCESS_CODE, "成功停止作业 !!!");
        } catch (Exception e) {
            // Fix: the failure previously carried a null message, hiding the cause.
            return new ReturnT<>(ReturnT.FAIL_CODE, e.getMessage());
        }
    }
}

View File

@@ -0,0 +1,55 @@
package com.platform.admin.core.route;
import com.platform.admin.core.route.strategy.*;
import com.platform.admin.core.route.strategy.ExecutorRouteBusyover;
import com.platform.admin.core.route.strategy.ExecutorRouteConsistentHash;
import com.platform.admin.core.route.strategy.ExecutorRouteFailover;
import com.platform.admin.core.route.strategy.ExecutorRouteFirst;
import com.platform.admin.core.route.strategy.ExecutorRouteLFU;
import com.platform.admin.core.route.strategy.ExecutorRouteLRU;
import com.platform.admin.core.route.strategy.ExecutorRouteLast;
import com.platform.admin.core.route.strategy.ExecutorRouteRandom;
import com.platform.admin.core.route.strategy.ExecutorRouteRound;
import com.platform.admin.core.util.I18nUtil;
/**
 * Available executor routing strategies. Each constant carries a localized
 * title and (except sharding broadcast) the router implementation to use.
 */
public enum ExecutorRouteStrategyEnum {

    FIRST(I18nUtil.getString("jobconf_route_first"), new ExecutorRouteFirst()),
    LAST(I18nUtil.getString("jobconf_route_last"), new ExecutorRouteLast()),
    ROUND(I18nUtil.getString("jobconf_route_round"), new ExecutorRouteRound()),
    RANDOM(I18nUtil.getString("jobconf_route_random"), new ExecutorRouteRandom()),
    CONSISTENT_HASH(I18nUtil.getString("jobconf_route_consistenthash"), new ExecutorRouteConsistentHash()),
    LEAST_FREQUENTLY_USED(I18nUtil.getString("jobconf_route_lfu"), new ExecutorRouteLFU()),
    LEAST_RECENTLY_USED(I18nUtil.getString("jobconf_route_lru"), new ExecutorRouteLRU()),
    FAILOVER(I18nUtil.getString("jobconf_route_failover"), new ExecutorRouteFailover()),
    BUSYOVER(I18nUtil.getString("jobconf_route_busyover"), new ExecutorRouteBusyover()),
    SHARDING_BROADCAST(I18nUtil.getString("jobconf_route_shard"), null);

    private final String title;
    private final ExecutorRouter router;

    ExecutorRouteStrategyEnum(String title, ExecutorRouter router) {
        this.title = title;
        this.router = router;
    }

    public String getTitle() {
        return title;
    }

    public ExecutorRouter getRouter() {
        return router;
    }

    /**
     * Resolve a strategy by its enum name, falling back to {@code defaultItem}
     * when the name is null or unknown.
     */
    public static ExecutorRouteStrategyEnum match(String name, ExecutorRouteStrategyEnum defaultItem) {
        if (name == null) {
            return defaultItem;
        }
        for (ExecutorRouteStrategyEnum candidate : values()) {
            if (candidate.name().equals(name)) {
                return candidate;
            }
        }
        return defaultItem;
    }
}

View File

@@ -0,0 +1,21 @@
package com.platform.admin.core.route;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
/**
 * Base class for executor routing strategies: given a trigger request and the
 * list of registered executor addresses, pick the address to dispatch to.
 */
public abstract class ExecutorRouter {
    protected static Logger logger = LoggerFactory.getLogger(ExecutorRouter.class);

    /**
     * route address
     *
     * @param triggerParam trigger request being dispatched
     * @param addressList  candidate executor addresses (assumed non-empty —
     *                     TODO confirm callers guarantee this)
     * @return ReturnT.content=address
     */
    public abstract ReturnT<String> route(TriggerParam triggerParam, List<String> addressList);
}

View File

@@ -0,0 +1,43 @@
package com.platform.admin.core.route.strategy;
import com.platform.admin.core.route.ExecutorRouter;
import com.platform.admin.core.scheduler.JobScheduler;
import com.platform.admin.core.util.I18nUtil;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import java.util.List;
/**
 * Busy-over router: probe each address with an idle-beat and dispatch to the
 * first executor that reports idle; otherwise return an aggregated failure.
 */
public class ExecutorRouteBusyover extends ExecutorRouter {

    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        StringBuffer idleBeatResultSB = new StringBuffer();
        for (String address : addressList) {
            // beat
            ReturnT<String> idleBeatResult = null;
            try {
                // ExecutorBiz executorBiz = JobScheduler.getExecutorBiz(address);
                // idleBeatResult = executorBiz.idleBeat(triggerParam.getJobId());
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
                idleBeatResult = new ReturnT<String>(ReturnT.FAIL_CODE, "" + e);
            }
            // Fix: with the idle-beat RPC disabled above, idleBeatResult stayed
            // null on the success path and .getCode() below threw a
            // NullPointerException; default the probe to FAIL instead.
            if (idleBeatResult == null) {
                idleBeatResult = new ReturnT<String>(ReturnT.FAIL_CODE, null);
            }
            idleBeatResultSB.append((idleBeatResultSB.length() > 0) ? "<br><br>" : "")
                    .append(I18nUtil.getString("jobconf_idleBeat") + "")
                    .append("<br>address").append(address)
                    .append("<br>code").append(idleBeatResult.getCode())
                    .append("<br>msg").append(idleBeatResult.getMsg());
            // beat success
            if (idleBeatResult.getCode() == ReturnT.SUCCESS_CODE) {
                idleBeatResult.setMsg(idleBeatResultSB.toString());
                idleBeatResult.setContent(address);
                return idleBeatResult;
            }
        }
        return new ReturnT<String>(ReturnT.FAIL_CODE, idleBeatResultSB.toString());
    }
}

View File

@@ -0,0 +1,84 @@
package com.platform.admin.core.route.strategy;
import com.platform.admin.core.route.ExecutorRouter;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;

import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.List;
import java.util.SortedMap;
import java.util.TreeMap;
/**
 * Consistent-hash router: executors in a group see the same address list, so
 * hashing spreads different jobs across machines while pinning each job to a
 * fixed machine.
 * a. virtual nodes smooth out an otherwise unbalanced distribution
 * b. an md5-based hash replaces String.hashCode, whose 32-bit values collide
 *    too easily for ring placement
 */
public class ExecutorRouteConsistentHash extends ExecutorRouter {

    /** Virtual nodes per physical address, to smooth the distribution. */
    private static final int VIRTUAL_NODE_NUM = 100;

    /**
     * md5-based hash of {@code key} onto the 2^32 ring.
     *
     * @param key string to place on the ring
     * @return unsigned 32-bit position as a long
     */
    private static long hash(String key) {
        MessageDigest md5;
        try {
            md5 = MessageDigest.getInstance("MD5");
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException("MD5 not supported", e);
        }
        md5.reset();
        // Fix: StandardCharsets.UTF_8 avoids the checked-exception charset
        // lookup — UTF-8 is guaranteed to exist on every JVM.
        md5.update(key.getBytes(StandardCharsets.UTF_8));
        byte[] digest = md5.digest();
        // hash code, Truncate to 32-bits
        long hashCode = ((long) (digest[3] & 0xFF) << 24)
                | ((long) (digest[2] & 0xFF) << 16)
                | ((long) (digest[1] & 0xFF) << 8)
                | (digest[0] & 0xFF);
        return hashCode & 0xffffffffL;
    }

    /**
     * Build the virtual-node ring for the current address list and pick the
     * address owning this job's position.
     */
    public String hashJob(int jobId, List<String> addressList) {
        // ------A1------A2-------A3------
        // -----------J1------------------
        TreeMap<Long, String> addressRing = new TreeMap<Long, String>();
        for (String address : addressList) {
            for (int i = 0; i < VIRTUAL_NODE_NUM; i++) {
                long addressHash = hash("SHARD-" + address + "-NODE-" + i);
                addressRing.put(addressHash, address);
            }
        }
        long jobHash = hash(String.valueOf(jobId));
        SortedMap<Long, String> lastRing = addressRing.tailMap(jobHash);
        if (!lastRing.isEmpty()) {
            return lastRing.get(lastRing.firstKey());
        }
        // Wrap around the ring when the job hashes past the last node.
        return addressRing.firstEntry().getValue();
    }

    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        String address = hashJob(triggerParam.getJobId(), addressList);
        return new ReturnT<String>(address);
    }
}

View File

@@ -0,0 +1,44 @@
package com.platform.admin.core.route.strategy;
import com.platform.admin.core.route.ExecutorRouter;
import com.platform.admin.core.scheduler.JobScheduler;
import com.platform.admin.core.util.I18nUtil;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import java.util.List;
/**
 * Failover router: probe each address with a heartbeat and dispatch to the
 * first executor that answers; otherwise return an aggregated failure.
 */
public class ExecutorRouteFailover extends ExecutorRouter {

    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        StringBuffer beatResultSB = new StringBuffer();
        for (String address : addressList) {
            // beat
            ReturnT<String> beatResult = null;
            try {
                // ExecutorBiz executorBiz = JobScheduler.getExecutorBiz(address);
                // beatResult = executorBiz.beat();
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
                beatResult = new ReturnT<String>(ReturnT.FAIL_CODE, "" + e);
            }
            // Fix: with the beat RPC disabled above, beatResult stayed null on
            // the success path and .getCode() below threw a
            // NullPointerException; default the probe to FAIL instead.
            if (beatResult == null) {
                beatResult = new ReturnT<String>(ReturnT.FAIL_CODE, null);
            }
            beatResultSB.append((beatResultSB.length() > 0) ? "<br><br>" : "")
                    .append(I18nUtil.getString("jobconf_beat") + "")
                    .append("<br>address").append(address)
                    .append("<br>code").append(beatResult.getCode())
                    .append("<br>msg").append(beatResult.getMsg());
            // beat success
            if (beatResult.getCode() == ReturnT.SUCCESS_CODE) {
                beatResult.setMsg(beatResultSB.toString());
                beatResult.setContent(address);
                return beatResult;
            }
        }
        return new ReturnT<String>(ReturnT.FAIL_CODE, beatResultSB.toString());
    }
}

View File

@@ -0,0 +1,16 @@
package com.platform.admin.core.route.strategy;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.admin.core.route.ExecutorRouter;
import java.util.List;
/**
 * Trivial router: always dispatch to the first registered executor address.
 */
public class ExecutorRouteFirst extends ExecutorRouter {

    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        String first = addressList.get(0);
        return new ReturnT<String>(first);
    }
}

View File

@@ -0,0 +1,78 @@
package com.platform.admin.core.route.strategy;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.admin.core.route.ExecutorRouter;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
 * LFU router: per job, prefer the executor address used least frequently.
 * a(*). LFU (Least Frequently Used): frequency/count based — this strategy
 * b.    LRU (Least Recently Used): time based
 */
public class ExecutorRouteLFU extends ExecutorRouter {

    // jobId -> (address -> usage count); rebuilt daily via CACHE_VALID_TIME.
    private static ConcurrentMap<Integer, HashMap<String, Integer>> jobLfuMap = new ConcurrentHashMap<Integer, HashMap<String, Integer>>();
    private static long CACHE_VALID_TIME = 0;

    /**
     * Pick the least-frequently-used address for this job and bump its count.
     */
    public String route(int jobId, List<String> addressList) {
        // Expire the whole cache daily so counts cannot grow stale forever.
        if (System.currentTimeMillis() > CACHE_VALID_TIME) {
            jobLfuMap.clear();
            CACHE_VALID_TIME = System.currentTimeMillis() + 1000 * 60 * 60 * 24;
        }
        // Fix: computeIfAbsent returns the map actually stored; the previous
        // get + putIfAbsent could keep mutating a local map that lost the
        // publication race and was never visible to other callers.
        HashMap<String, Integer> lfuItemMap =
                jobLfuMap.computeIfAbsent(jobId, id -> new HashMap<String, Integer>());
        // Register new addresses (and reset runaway counters) with a small
        // random seed so a freshly added executor is not hammered immediately.
        Random random = new Random();
        for (String address : addressList) {
            if (!lfuItemMap.containsKey(address) || lfuItemMap.get(address) > 1000000) {
                lfuItemMap.put(address, random.nextInt(addressList.size()));
            }
        }
        // Drop addresses that are no longer registered.
        List<String> delKeys = new ArrayList<>();
        for (String existKey : lfuItemMap.keySet()) {
            if (!addressList.contains(existKey)) {
                delKeys.add(existKey);
            }
        }
        for (String delKey : delKeys) {
            lfuItemMap.remove(delKey);
        }
        // Fix: select the minimum directly (O(n)) instead of sorting the whole
        // entry list; an unused local variable was also removed.
        Map.Entry<String, Integer> leastUsed =
                Collections.min(lfuItemMap.entrySet(), Map.Entry.comparingByValue());
        leastUsed.setValue(leastUsed.getValue() + 1);
        return leastUsed.getKey();
    }

    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        String address = route(triggerParam.getJobId(), addressList);
        return new ReturnT<String>(address);
    }
}

View File

@@ -0,0 +1,75 @@
package com.platform.admin.core.route.strategy;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.admin.core.route.ExecutorRouter;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
 * LRU router: per job, prefer the executor address that has gone longest
 * without being used.
 * a.    LFU (Least Frequently Used): frequency/count based
 * b(*). LRU (Least Recently Used): time based — this strategy
 */
public class ExecutorRouteLRU extends ExecutorRouter {

    // jobId -> access-ordered address map; rebuilt daily via CACHE_VALID_TIME.
    private static ConcurrentMap<Integer, LinkedHashMap<String, String>> jobLRUMap = new ConcurrentHashMap<Integer, LinkedHashMap<String, String>>();
    private static long CACHE_VALID_TIME = 0;

    /**
     * Pick the least-recently-used address for this job.
     */
    public String route(int jobId, List<String> addressList) {
        // Expire the whole cache daily.
        if (System.currentTimeMillis() > CACHE_VALID_TIME) {
            jobLRUMap.clear();
            CACHE_VALID_TIME = System.currentTimeMillis() + 1000 * 60 * 60 * 24;
        }
        /*
         * LinkedHashMap with accessOrder=true: get/put moves the entry to the
         * tail, so the head is always the least recently used entry.
         * Fix: computeIfAbsent returns the map actually stored in jobLRUMap;
         * the previous get + putIfAbsent could keep using a local map that
         * lost the publication race and was never shared.
         */
        LinkedHashMap<String, String> lruItem =
                jobLRUMap.computeIfAbsent(jobId, id -> new LinkedHashMap<String, String>(16, 0.75f, true));
        // Register new addresses.
        for (String address : addressList) {
            if (!lruItem.containsKey(address)) {
                lruItem.put(address, address);
            }
        }
        // Drop addresses that are no longer registered.
        List<String> delKeys = new ArrayList<>();
        for (String existKey : lruItem.keySet()) {
            if (!addressList.contains(existKey)) {
                delKeys.add(existKey);
            }
        }
        for (String delKey : delKeys) {
            lruItem.remove(delKey);
        }
        // Head of the access-ordered map = least recently used address; the
        // get also refreshes its recency.
        String eldestKey = lruItem.entrySet().iterator().next().getKey();
        return lruItem.get(eldestKey);
    }

    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        String address = route(triggerParam.getJobId(), addressList);
        return new ReturnT<String>(address);
    }
}

View File

@@ -0,0 +1,16 @@
package com.platform.admin.core.route.strategy;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.admin.core.route.ExecutorRouter;
import java.util.List;
/**
 * Trivial router: always dispatch to the last registered executor address.
 */
public class ExecutorRouteLast extends ExecutorRouter {

    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        int lastIndex = addressList.size() - 1;
        return new ReturnT<String>(addressList.get(lastIndex));
    }
}

View File

@@ -0,0 +1,20 @@
package com.platform.admin.core.route.strategy;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.admin.core.route.ExecutorRouter;
import java.util.List;
import java.util.Random;
/**
 * Random router: dispatch to a uniformly random executor address.
 */
public class ExecutorRouteRandom extends ExecutorRouter {

    private static Random localRandom = new Random();

    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        int index = localRandom.nextInt(addressList.size());
        return new ReturnT<String>(addressList.get(index));
    }
}

View File

@@ -0,0 +1,36 @@
package com.platform.admin.core.route.strategy;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.admin.core.route.ExecutorRouter;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
public class ExecutorRouteRound extends ExecutorRouter {

    // Per-job trigger counter; compute() below keeps increments atomic across trigger threads.
    private static final ConcurrentMap<Integer, Integer> routeCountEachJob = new ConcurrentHashMap<Integer, Integer>();
    // volatile so the daily reset below is visible to every trigger-pool thread.
    private static volatile long CACHE_VALID_TIME = 0;

    /**
     * Returns the round-robin counter for the given job, advanced by one.
     * The counter map is wiped once a day; a fresh (or overflowed >1,000,000)
     * counter restarts at a small random value to spread first-trigger load.
     */
    private static int count(int jobId) {
        // cache clear: reset all counters once every 24h
        if (System.currentTimeMillis() > CACHE_VALID_TIME) {
            routeCountEachJob.clear();
            CACHE_VALID_TIME = System.currentTimeMillis() + 1000*60*60*24;
        }

        // FIX: the original separate get/put could lose increments when two
        // trigger threads raced on the same jobId; compute() performs the
        // read-modify-write atomically on the ConcurrentMap.
        return routeCountEachJob.compute(jobId, (id, count) ->
                (count == null || count > 1000000) ? new Random().nextInt(100) : count + 1);
    }

    /**
     * Routing strategy: round-robins over the candidate addresses, keyed by job id.
     */
    @Override
    public ReturnT<String> route(TriggerParam triggerParam, List<String> addressList) {
        String address = addressList.get(count(triggerParam.getJobId())%addressList.size());
        return new ReturnT<String>(address);
    }
}

View File

@@ -0,0 +1,67 @@
package com.platform.admin.core.scheduler;
import com.platform.admin.core.util.I18nUtil;
import com.platform.core.enums.ExecutorBlockStrategyEnum;
import com.platform.admin.core.thread.JobFailMonitorHelper;
import com.platform.admin.core.thread.JobLogReportHelper;
import com.platform.admin.core.thread.JobRegistryMonitorHelper;
import com.platform.admin.core.thread.JobScheduleHelper;
import com.platform.admin.core.thread.JobTriggerPoolHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class JobScheduler {
    private static final Logger logger = LoggerFactory.getLogger(JobScheduler.class);

    /**
     * Boots every admin-side background component, in order: i18n first (the
     * block-strategy titles are localized before anything else runs), then the
     * registry and fail monitors, the trigger thread pools, the log-report
     * thread, and finally the schedule loop that starts firing jobs.
     *
     * @throws Exception propagated from any component's start-up
     */
    public void init() throws Exception {
        // init i18n
        initI18n();

        // admin registry monitor run
        JobRegistryMonitorHelper.getInstance().start();

        // admin monitor run
        JobFailMonitorHelper.getInstance().start();

        // admin trigger pool start
        JobTriggerPoolHelper.toStart();

        // admin log report start
        JobLogReportHelper.getInstance().start();

        // start-schedule
        JobScheduleHelper.getInstance().start();

        logger.info(">>>>>>>>> init service-data-dts admin success.");
    }

    /**
     * Shuts the components down in exact reverse order of init(): the
     * scheduler stops producing triggers before the pools and monitors that
     * consume them are stopped.
     *
     * @throws Exception propagated from any component's shutdown
     */
    public void destroy() throws Exception {
        // stop-schedule
        JobScheduleHelper.getInstance().toStop();

        // admin log report stop
        JobLogReportHelper.getInstance().toStop();

        // admin trigger pool stop
        JobTriggerPoolHelper.toStop();

        // admin monitor stop
        JobFailMonitorHelper.getInstance().toStop();

        // admin registry stop
        JobRegistryMonitorHelper.getInstance().toStop();
    }

    // ---------------------- I18n ----------------------

    // Localizes the title of every block-strategy enum constant from the i18n bundle.
    private void initI18n() {
        for (ExecutorBlockStrategyEnum item : ExecutorBlockStrategyEnum.values()) {
            item.setTitle(I18nUtil.getString("jobconf_block_".concat(item.name())));
        }
    }
}

View File

@@ -0,0 +1,207 @@
package com.platform.admin.core.thread;
import com.platform.admin.core.conf.JobAdminConfig;
import com.platform.admin.core.trigger.TriggerTypeEnum;
import com.platform.admin.core.util.I18nUtil;
import com.platform.admin.entity.JobGroup;
import com.platform.admin.entity.JobInfo;
import com.platform.admin.entity.JobLog;
import com.platform.core.biz.model.ReturnT;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.mail.javamail.MimeMessageHelper;
import javax.mail.internet.MimeMessage;
import java.text.MessageFormat;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
/**
 * Fail-log monitor: a daemon thread that periodically scans failed job logs,
 * schedules retries for logs that still have retries left, and sends alarm
 * emails to the addresses configured on the job.
 */
public class JobFailMonitorHelper {
    private static Logger logger = LoggerFactory.getLogger(JobFailMonitorHelper.class);

    private static JobFailMonitorHelper instance = new JobFailMonitorHelper();
    public static JobFailMonitorHelper getInstance(){
        return instance;
    }

    // ---------------------- monitor ----------------------

    private Thread monitorThread;
    private volatile boolean toStop = false;

    /**
     * Starts the daemon monitor thread. Every 10 seconds it loads up to 1000
     * failed log ids, locks each one by flipping alarm_status 0 -> -1 (so only
     * one node processes it), fires a retry trigger when retries remain, then
     * sends the alarm and records the final alarm status.
     */
    public void start(){
        monitorThread = new Thread(new Runnable() {

            @Override
            public void run() {

                // monitor
                while (!toStop) {
                    try {

                        List<Long> failLogIds = JobAdminConfig.getAdminConfig().getJobLogMapper().findFailJobLogIds(1000);
                        if (failLogIds!=null && !failLogIds.isEmpty()) {
                            for (long failLogId: failLogIds) {

                                // lock log: only the caller that wins the 0 -> -1 update owns this log
                                int lockRet = JobAdminConfig.getAdminConfig().getJobLogMapper().updateAlarmStatus(failLogId, 0, -1);
                                if (lockRet < 1) {
                                    continue;
                                }
                                JobLog log = JobAdminConfig.getAdminConfig().getJobLogMapper().load(failLogId);
                                JobInfo info = JobAdminConfig.getAdminConfig().getJobInfoMapper().loadById(log.getJobId());

                                // 1、fail retry monitor: re-trigger with one retry consumed
                                if (log.getExecutorFailRetryCount() > 0) {
                                    JobTriggerPoolHelper.trigger(log.getJobId(), TriggerTypeEnum.RETRY, (log.getExecutorFailRetryCount()-1), log.getExecutorShardingParam(), log.getExecutorParam());
                                    String retryMsg = "<br><br><span style=\"color:#F39C12;\" > >>>>>>>>>>>"+ I18nUtil.getString("jobconf_trigger_type_retry") +"<<<<<<<<<<< </span><br>";
                                    log.setTriggerMsg(log.getTriggerMsg() + retryMsg);
                                    JobAdminConfig.getAdminConfig().getJobLogMapper().updateTriggerInfo(log);
                                }

                                // 2、fail alarm monitor
                                int newAlarmStatus = 0;     // alarm status: 0=default, -1=locked, 1=no alarm needed, 2=alarm sent, 3=alarm failed
                                if (info!=null && info.getAlarmEmail()!=null && info.getAlarmEmail().trim().length()>0) {
                                    boolean alarmResult = true;
                                    try {
                                        alarmResult = failAlarm(info, log);
                                    } catch (Exception e) {
                                        alarmResult = false;
                                        logger.error(e.getMessage(), e);
                                    }
                                    newAlarmStatus = alarmResult?2:3;
                                } else {
                                    newAlarmStatus = 1;
                                }

                                JobAdminConfig.getAdminConfig().getJobLogMapper().updateAlarmStatus(failLogId, -1, newAlarmStatus);
                            }
                        }

                    } catch (Exception e) {
                        if (!toStop) {
                            // FIX: "{0}" is MessageFormat syntax, not SLF4J; pass the
                            // throwable as the dedicated last argument so the stack trace is logged.
                            logger.error(">>>>>>>>>>> service-data-dts, job fail monitor thread error:", e);
                        }
                    }

                    try {
                        TimeUnit.SECONDS.sleep(10);
                    } catch (Exception e) {
                        if (!toStop) {
                            logger.error(e.getMessage(), e);
                        }
                    }

                }

                logger.info(">>>>>>>>>>> service-data-dts, job fail monitor thread stop");

            }
        });
        monitorThread.setDaemon(true);
        monitorThread.setName("service-data-dts, admin JobFailMonitorHelper");
        monitorThread.start();
    }

    // Signals the monitor loop to stop, then interrupts and joins the thread.
    public void toStop(){
        toStop = true;
        // interrupt and wait
        monitorThread.interrupt();
        try {
            monitorThread.join();
        } catch (InterruptedException e) {
            logger.error(e.getMessage(), e);
        }
    }

    // ---------------------- alarm ----------------------

    // email alarm template; {0}-{3} are filled by MessageFormat in failAlarm
    private static final String mailBodyTemplate = "<h5>" + I18nUtil.getString("jobconf_monitor_detail") + "</span>" +
            "<table border=\"1\" cellpadding=\"3\" style=\"border-collapse:collapse; width:80%;\" >\n" +
            " <thead style=\"font-weight: bold;color: #ffffff;background-color: #ff8c00;\" >" +
            " <tr>\n" +
            " <td width=\"20%\" >"+ I18nUtil.getString("jobinfo_field_jobgroup") +"</td>\n" +
            " <td width=\"10%\" >"+ I18nUtil.getString("jobinfo_field_id") +"</td>\n" +
            " <td width=\"20%\" >"+ I18nUtil.getString("jobinfo_field_jobdesc") +"</td>\n" +
            " <td width=\"10%\" >"+ I18nUtil.getString("jobconf_monitor_alarm_title") +"</td>\n" +
            " <td width=\"40%\" >"+ I18nUtil.getString("jobconf_monitor_alarm_content") +"</td>\n" +
            " </tr>\n" +
            " </thead>\n" +
            " <tbody>\n" +
            " <tr>\n" +
            " <td>{0}</td>\n" +
            " <td>{1}</td>\n" +
            " <td>{2}</td>\n" +
            " <td>"+ I18nUtil.getString("jobconf_monitor_alarm_type") +"</td>\n" +
            " <td>{3}</td>\n" +
            " </tr>\n" +
            " </tbody>\n" +
            "</table>";

    /**
     * Sends the fail-alarm email for one failed job log.
     *
     * @param info   job whose alarmEmail field lists comma-separated recipients
     * @param jobLog the failed log entry used to build the alarm content
     * @return true when every mail was sent (or none was required), false when any send failed
     */
    private boolean failAlarm(JobInfo info, JobLog jobLog){
        boolean alarmResult = true;

        // send monitor email
        if (info!=null && info.getAlarmEmail()!=null && info.getAlarmEmail().trim().length()>0) {

            // alarmContent: include trigger/handle failure details when present
            String alarmContent = "Alarm Job LogId=" + jobLog.getId();
            if (jobLog.getTriggerCode() != ReturnT.SUCCESS_CODE) {
                alarmContent += "<br>TriggerMsg=<br>" + jobLog.getTriggerMsg();
            }
            if (jobLog.getHandleCode()>0 && jobLog.getHandleCode() != ReturnT.SUCCESS_CODE) {
                alarmContent += "<br>HandleCode=" + jobLog.getHandleMsg();
            }

            // email info
            JobGroup group = JobAdminConfig.getAdminConfig().getJobGroupMapper().load(Integer.valueOf(info.getJobGroup()));
            String personal = I18nUtil.getString("admin_name_full");
            String title = I18nUtil.getString("jobconf_monitor");
            String content = MessageFormat.format(mailBodyTemplate,
                    group!=null?group.getTitle():"null",
                    info.getId(),
                    info.getJobDesc(),
                    alarmContent);

            // de-duplicate recipients, then send one mail per address
            Set<String> emailSet = new HashSet<String>(Arrays.asList(info.getAlarmEmail().split(",")));
            for (String email: emailSet) {

                // make mail; a failure for one recipient is logged but does not stop the others
                try {
                    MimeMessage mimeMessage = JobAdminConfig.getAdminConfig().getMailSender().createMimeMessage();

                    MimeMessageHelper helper = new MimeMessageHelper(mimeMessage, true);
                    helper.setFrom(JobAdminConfig.getAdminConfig().getEmailUserName(), personal);
                    helper.setTo(email);
                    helper.setSubject(title);
                    helper.setText(content, true);

                    JobAdminConfig.getAdminConfig().getMailSender().send(mimeMessage);
                } catch (Exception e) {
                    logger.error(">>>>>>>>>>> service-data-dts, job fail alarm email send error, JobLogId:{}", jobLog.getId(), e);
                    alarmResult = false;
                }
            }
        }

        // do something, custom alarm strategy, such as sms
        return alarmResult;
    }
}

View File

@@ -0,0 +1,151 @@
package com.platform.admin.core.thread;
import com.platform.admin.core.conf.JobAdminConfig;
import com.platform.admin.entity.JobLogReport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
 * Log report helper: a daemon thread that, once a minute, refreshes the
 * per-day trigger statistics (running/success/fail) for the last three days,
 * and, once a day, purges job logs older than the configured retention.
 */
public class JobLogReportHelper {
    private static Logger logger = LoggerFactory.getLogger(JobLogReportHelper.class);

    private static JobLogReportHelper instance = new JobLogReportHelper();
    public static JobLogReportHelper getInstance(){
        return instance;
    }

    private Thread logrThread;
    private volatile boolean toStop = false;

    /**
     * Starts the daemon report/clean thread.
     */
    public void start(){
        logrThread = new Thread(new Runnable() {

            @Override
            public void run() {

                // last clean log time
                long lastCleanLogTime = 0;

                while (!toStop) {

                    // 1、log-report refresh: refresh log report in 3 days
                    try {
                        for (int i = 0; i < 3; i++) {

                            // day window [00:00:00.000, 23:59:59.999] for today minus i
                            Calendar itemDay = Calendar.getInstance();
                            itemDay.add(Calendar.DAY_OF_MONTH, -i);
                            itemDay.set(Calendar.HOUR_OF_DAY, 0);
                            itemDay.set(Calendar.MINUTE, 0);
                            itemDay.set(Calendar.SECOND, 0);
                            itemDay.set(Calendar.MILLISECOND, 0);

                            Date todayFrom = itemDay.getTime();

                            itemDay.set(Calendar.HOUR_OF_DAY, 23);
                            itemDay.set(Calendar.MINUTE, 59);
                            itemDay.set(Calendar.SECOND, 59);
                            itemDay.set(Calendar.MILLISECOND, 999);

                            Date todayTo = itemDay.getTime();

                            // refresh log-report every minute
                            JobLogReport logReport = new JobLogReport();
                            logReport.setTriggerDay(todayFrom);
                            logReport.setRunningCount(0);
                            logReport.setSucCount(0);
                            logReport.setFailCount(0);

                            Map<String, Object> triggerCountMap = JobAdminConfig
                                    .getAdminConfig().getJobLogMapper().findLogReport(todayFrom, todayTo);
                            if (triggerCountMap!=null && triggerCountMap.size()>0) {
                                int triggerDayCount = triggerCountMap.containsKey("triggerDayCount")? Integer.valueOf(String.valueOf(triggerCountMap.get("triggerDayCount"))):0;
                                int triggerDayCountRunning = triggerCountMap.containsKey("triggerDayCountRunning")? Integer.valueOf(String.valueOf(triggerCountMap.get("triggerDayCountRunning"))):0;
                                int triggerDayCountSuc = triggerCountMap.containsKey("triggerDayCountSuc")? Integer.valueOf(String.valueOf(triggerCountMap.get("triggerDayCountSuc"))):0;
                                // fail = total - running - success
                                int triggerDayCountFail = triggerDayCount - triggerDayCountRunning - triggerDayCountSuc;

                                logReport.setRunningCount(triggerDayCountRunning);
                                logReport.setSucCount(triggerDayCountSuc);
                                logReport.setFailCount(triggerDayCountFail);
                            }

                            // do refresh: update the existing row, insert when absent
                            int ret = JobAdminConfig.getAdminConfig().getJobLogReportMapper().update(logReport);
                            if (ret < 1) {
                                JobAdminConfig.getAdminConfig().getJobLogReportMapper().save(logReport);
                            }
                        }

                    } catch (Exception e) {
                        if (!toStop) {
                            // FIX: with "{}" present, SLF4J consumed the throwable as the
                            // placeholder argument and dropped the stack trace; pass it
                            // as the dedicated last argument instead.
                            logger.error(">>>>>>>>>>> service-data-dts, job log report thread error:", e);
                        }
                    }

                    // 2、log-clean: switch open & once each day
                    if (JobAdminConfig.getAdminConfig().getLogretentiondays()>0
                            && System.currentTimeMillis() - lastCleanLogTime > 24*60*60*1000) {

                        // expire-time: midnight of (today - retention days)
                        Calendar expiredDay = Calendar.getInstance();
                        expiredDay.add(Calendar.DAY_OF_MONTH, -1 * JobAdminConfig.getAdminConfig().getLogretentiondays());
                        expiredDay.set(Calendar.HOUR_OF_DAY, 0);
                        expiredDay.set(Calendar.MINUTE, 0);
                        expiredDay.set(Calendar.SECOND, 0);
                        expiredDay.set(Calendar.MILLISECOND, 0);
                        Date clearBeforeTime = expiredDay.getTime();

                        // clean expired log, in batches of 1000 until none remain
                        List<Long> logIds = null;
                        do {
                            logIds = JobAdminConfig.getAdminConfig().getJobLogMapper().findClearLogIds(0, 0, clearBeforeTime, 0, 1000);
                            if (logIds!=null && logIds.size()>0) {
                                JobAdminConfig.getAdminConfig().getJobLogMapper().clearLog(logIds);
                            }
                        } while (logIds!=null && logIds.size()>0);

                        // update clean time
                        lastCleanLogTime = System.currentTimeMillis();
                    }

                    try {
                        TimeUnit.MINUTES.sleep(1);
                    } catch (Exception e) {
                        if (!toStop) {
                            logger.error(e.getMessage(), e);
                        }
                    }

                }

                logger.info(">>>>>>>>>>> service-data-dts, job log report thread stop");

            }
        });
        logrThread.setDaemon(true);
        logrThread.setName("service-data-dts, admin JobLogReportHelper");
        logrThread.start();
    }

    // Signals the loop to stop, then interrupts and joins the thread.
    public void toStop(){
        toStop = true;
        // interrupt and wait
        logrThread.interrupt();
        try {
            logrThread.join();
        } catch (InterruptedException e) {
            logger.error(e.getMessage(), e);
        }
    }
}

View File

@@ -0,0 +1,103 @@
package com.platform.admin.core.thread;
import com.platform.admin.core.conf.JobAdminConfig;
import com.platform.admin.entity.JobGroup;
import com.platform.admin.entity.JobRegistry;
import com.platform.core.enums.RegistryConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.concurrent.TimeUnit;
/**
 * Registry monitor: a daemon thread that prunes dead executor registrations
 * and refreshes the address list of every auto-registered job group, once per
 * registry beat interval.
 */
public class JobRegistryMonitorHelper {
    private static Logger logger = LoggerFactory.getLogger(JobRegistryMonitorHelper.class);

    private static JobRegistryMonitorHelper instance = new JobRegistryMonitorHelper();
    public static JobRegistryMonitorHelper getInstance(){
        return instance;
    }

    private Thread registryThread;
    private volatile boolean toStop = false;

    /**
     * Starts the daemon registry-monitor thread.
     */
    public void start(){
        registryThread = new Thread(() -> {
            while (!toStop) {
                try {
                    // auto registry group: only groups with addressType=0 are managed here
                    List<JobGroup> groupList = JobAdminConfig.getAdminConfig().getJobGroupMapper().findByAddressType(0);
                    if (groupList!=null && !groupList.isEmpty()) {

                        // remove dead address (admin/executor)
                        List<Integer> ids = JobAdminConfig.getAdminConfig().getJobRegistryMapper().findDead(RegistryConfig.DEAD_TIMEOUT, new Date());
                        if (ids!=null && ids.size()>0) {
                            JobAdminConfig.getAdminConfig().getJobRegistryMapper().removeDead(ids);
                        }

                        // fresh online address (admin/executor): appName -> distinct registered addresses
                        HashMap<String, List<String>> appAddressMap = new HashMap<>();
                        List<JobRegistry> list = JobAdminConfig.getAdminConfig().getJobRegistryMapper().findAll(RegistryConfig.DEAD_TIMEOUT, new Date());
                        if (list != null) {
                            for (JobRegistry item: list) {
                                if (RegistryConfig.RegistType.EXECUTOR.name().equals(item.getRegistryGroup())) {
                                    String appName = item.getRegistryKey();
                                    List<String> registryList = appAddressMap.get(appName);
                                    if (registryList == null) {
                                        registryList = new ArrayList<>();
                                    }
                                    if (!registryList.contains(item.getRegistryValue())) {
                                        registryList.add(item.getRegistryValue());
                                    }
                                    appAddressMap.put(appName, registryList);
                                }
                            }
                        }

                        // fresh group address: null when no live executor registered
                        for (JobGroup group: groupList) {
                            List<String> registryList = appAddressMap.get(group.getAppName());
                            String addressListStr = null;
                            if (registryList!=null && !registryList.isEmpty()) {
                                Collections.sort(registryList);
                                // join sorted addresses with "," (replaces the manual += loop)
                                addressListStr = String.join(",", registryList);
                            }
                            group.setAddressList(addressListStr);
                            JobAdminConfig.getAdminConfig().getJobGroupMapper().update(group);
                        }
                    }
                } catch (Exception e) {
                    if (!toStop) {
                        // FIX: drop the unused "{}" placeholder and pass the throwable
                        // as the last argument so SLF4J logs the full stack trace.
                        logger.error(">>>>>>>>>>> service-data-dts, job registry monitor thread error:", e);
                    }
                }
                try {
                    TimeUnit.SECONDS.sleep(RegistryConfig.BEAT_TIMEOUT);
                } catch (InterruptedException e) {
                    if (!toStop) {
                        logger.error(">>>>>>>>>>> service-data-dts, job registry monitor thread error:", e);
                    }
                }
            }
            logger.info(">>>>>>>>>>> service-data-dts, job registry monitor thread stop");
        });
        registryThread.setDaemon(true);
        registryThread.setName("service-data-dts, admin JobRegistryMonitorHelper");
        registryThread.start();
    }

    // Signals the loop to stop, then interrupts and joins the thread.
    public void toStop(){
        toStop = true;
        // interrupt and wait
        registryThread.interrupt();
        try {
            registryThread.join();
        } catch (InterruptedException e) {
            logger.error(e.getMessage(), e);
        }
    }
}

View File

@@ -0,0 +1,349 @@
package com.platform.admin.core.thread;
import com.platform.admin.core.conf.JobAdminConfig;
import com.platform.admin.core.cron.CronExpression;
import com.platform.admin.core.trigger.TriggerTypeEnum;
import com.platform.admin.entity.JobInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
/**
 * Core scheduler: a DB-scan thread plus an in-memory "time ring".
 * scheduleThread pre-reads jobs due within PRE_READ_MS (serialized across
 * schedulers by a row lock on job_lock) and either triggers them directly or
 * stages their ids in the ring; ringThread fires staged ids on their
 * second-of-minute tick.
 */
public class JobScheduleHelper {
    private static Logger logger = LoggerFactory.getLogger(JobScheduleHelper.class);

    private static JobScheduleHelper instance = new JobScheduleHelper();
    public static JobScheduleHelper getInstance() {
        return instance;
    }

    // how far ahead (ms) jobs are pre-read so near-future fires can be staged in the time ring
    public static final long PRE_READ_MS = 5000; // pre read

    private Thread scheduleThread;
    private Thread ringThread;
    private volatile boolean scheduleThreadToStop = false;
    private volatile boolean ringThreadToStop = false;
    // time ring: second-of-minute (0-59) -> job ids staged to fire at that tick
    private volatile static Map<Integer, List<Integer>> ringData = new ConcurrentHashMap<>();

    /**
     * Starts both scheduler daemons (scheduleThread and ringThread).
     */
    public void start() {

        // schedule thread
        scheduleThread = new Thread(new Runnable() {
            @Override
            public void run() {

                try {
                    // initial delay, aligned to a whole second before the first scan
                    TimeUnit.MILLISECONDS.sleep(5000 - System.currentTimeMillis() % 1000);
                } catch (InterruptedException e) {
                    if (!scheduleThreadToStop) {
                        logger.error(e.getMessage(), e);
                    }
                }
                logger.info(">>>>>>>>> init service-data-dts admin scheduler success.");

                // pre-read count: threadpool-size * trigger-qps (each trigger cost 50ms, qps = 1000/50 = 20)
                int preReadCount = (JobAdminConfig.getAdminConfig().getTriggerPoolFastMax() + JobAdminConfig.getAdminConfig().getTriggerPoolSlowMax()) * 20;

                while (!scheduleThreadToStop) {

                    // Scan Job
                    long start = System.currentTimeMillis();

                    Connection conn = null;
                    Boolean connAutoCommit = null;
                    PreparedStatement preparedStatement = null;

                    boolean preReadSuc = true;
                    try {

                        conn = JobAdminConfig.getAdminConfig().getDataSource().getConnection();
                        connAutoCommit = conn.getAutoCommit();
                        conn.setAutoCommit(false);

                        // the "for update" row lock on job_lock serializes concurrent
                        // schedulers for the duration of this transaction
                        preparedStatement = conn.prepareStatement("select * from job_lock where lock_name = 'schedule_lock' for update");
                        preparedStatement.execute();

                        // tx start

                        // 1、pre read: jobs whose next trigger time is within PRE_READ_MS
                        long nowTime = System.currentTimeMillis();
                        List<JobInfo> scheduleList = JobAdminConfig.getAdminConfig().getJobInfoMapper().scheduleJobQuery(nowTime + PRE_READ_MS, preReadCount);
                        if (scheduleList != null && scheduleList.size() > 0) {
                            // 2、push time-ring
                            for (JobInfo jobInfo : scheduleList) {

                                // time-ring jump
                                if (nowTime > jobInfo.getTriggerNextTime() + PRE_READ_MS) {
                                    // 2.1、trigger expired by more than 5s: misfire -> skip this fire, only compute the next trigger time
                                    logger.warn(">>>>>>>>>>> service-data-dts, schedule misfire, jobId = " + jobInfo.getId());

                                    // fresh next
                                    refreshNextValidTime(jobInfo, new Date());

                                } else if (nowTime > jobInfo.getTriggerNextTime()) {
                                    // 2.2、expired by less than 5s: trigger immediately, then compute the next trigger time

                                    // 1、trigger
                                    JobTriggerPoolHelper.trigger(jobInfo.getId(), TriggerTypeEnum.CRON, -1, null, null);
                                    logger.debug(">>>>>>>>>>> service-data-dts, schedule push trigger : jobId = " + jobInfo.getId());

                                    // 2、fresh next
                                    refreshNextValidTime(jobInfo, new Date());

                                    // next-trigger-time in 5s, pre-read again: stage the
                                    // upcoming fire in the ring right away
                                    if (jobInfo.getTriggerStatus() == 1 && nowTime + PRE_READ_MS > jobInfo.getTriggerNextTime()) {

                                        // 1、make ring second
                                        int ringSecond = (int) ((jobInfo.getTriggerNextTime() / 1000) % 60);

                                        // 2、push time ring
                                        pushTimeRing(ringSecond, jobInfo.getId());

                                        // 3、fresh next
                                        refreshNextValidTime(jobInfo, new Date(jobInfo.getTriggerNextTime()));
                                    }

                                } else {
                                    // 2.3、not yet due (pre-read window): stage in the time ring, then compute the next trigger time

                                    // 1、make ring second
                                    int ringSecond = (int) ((jobInfo.getTriggerNextTime() / 1000) % 60);

                                    // 2、push time ring
                                    pushTimeRing(ringSecond, jobInfo.getId());

                                    // 3、fresh next
                                    refreshNextValidTime(jobInfo, new Date(jobInfo.getTriggerNextTime()));
                                }

                            }

                            // 3、update trigger info (persists the refreshed next/last trigger times)
                            for (JobInfo jobInfo : scheduleList) {
                                JobAdminConfig.getAdminConfig().getJobInfoMapper().scheduleUpdate(jobInfo);
                            }

                        } else {
                            preReadSuc = false;
                        }

                        // tx stop

                    } catch (Exception e) {
                        if (!scheduleThreadToStop) {
                            logger.error(">>>>>>>>>>> service-data-dts, JobScheduleHelper#scheduleThread error:{}", e);
                        }
                    } finally {

                        // commit (also releases the job_lock row lock)
                        if (conn != null) {
                            try {
                                conn.commit();
                            } catch (SQLException e) {
                                if (!scheduleThreadToStop) {
                                    logger.error(e.getMessage(), e);
                                }
                            }
                            try {
                                conn.setAutoCommit(connAutoCommit);
                            } catch (SQLException e) {
                                if (!scheduleThreadToStop) {
                                    logger.error(e.getMessage(), e);
                                }
                            }
                            try {
                                conn.close();
                            } catch (SQLException e) {
                                if (!scheduleThreadToStop) {
                                    logger.error(e.getMessage(), e);
                                }
                            }
                        }

                        // close PreparedStatement
                        if (null != preparedStatement) {
                            try {
                                preparedStatement.close();
                            } catch (SQLException e) {
                                if (!scheduleThreadToStop) {
                                    logger.error(e.getMessage(), e);
                                }
                            }
                        }
                    }
                    long cost = System.currentTimeMillis() - start;

                    // Wait, aligned to the next second; scans that already took >=1s skip the wait
                    if (cost < 1000) {
                        try {
                            // pre-read period: success > scan each second; fail > skip this period;
                            TimeUnit.MILLISECONDS.sleep((preReadSuc ? 1000 : PRE_READ_MS) - System.currentTimeMillis() % 1000);
                        } catch (InterruptedException e) {
                            if (!scheduleThreadToStop) {
                                logger.error(e.getMessage(), e);
                            }
                        }
                    }

                }

                logger.info(">>>>>>>>>>> service-data-dts, JobScheduleHelper#scheduleThread stop");
            }
        });
        scheduleThread.setDaemon(true);
        scheduleThread.setName("service-data-dts, admin JobScheduleHelper#scheduleThread");
        scheduleThread.start();

        // ring thread
        ringThread = new Thread(() -> {

            // align second
            try {
                TimeUnit.MILLISECONDS.sleep(1000 - System.currentTimeMillis() % 1000);
            } catch (InterruptedException e) {
                if (!ringThreadToStop) {
                    logger.error(e.getMessage(), e);
                }
            }

            while (!ringThreadToStop) {

                try {
                    // second data
                    List<Integer> ringItemData = new ArrayList<>();
                    int nowSecond = Calendar.getInstance().get(Calendar.SECOND);   // also drain the previous tick, in case a slow iteration skipped over it
                    for (int i = 0; i < 2; i++) {
                        List<Integer> tmpData = ringData.remove((nowSecond + 60 - i) % 60);
                        if (tmpData != null) {
                            ringItemData.addAll(tmpData);
                        }
                    }

                    // ring trigger
                    logger.debug(">>>>>>>>>>> service-data-dts, time-ring beat : " + nowSecond + " = " + Arrays.asList(ringItemData));
                    if (ringItemData.size() > 0) {
                        // do trigger
                        for (int jobId : ringItemData) {
                            // do trigger
                            JobTriggerPoolHelper.trigger(jobId, TriggerTypeEnum.CRON, -1, null, null);
                        }
                        // clear
                        ringItemData.clear();
                    }
                } catch (Exception e) {
                    if (!ringThreadToStop) {
                        logger.error(">>>>>>>>>>> service-data-dts, JobScheduleHelper#ringThread error:{}", e);
                    }
                }

                // next second, align second
                try {
                    TimeUnit.MILLISECONDS.sleep(1000 - System.currentTimeMillis() % 1000);
                } catch (InterruptedException e) {
                    if (!ringThreadToStop) {
                        logger.error(e.getMessage(), e);
                    }
                }
            }
            logger.info(">>>>>>>>>>> service-data-dts, JobScheduleHelper#ringThread stop");
        });
        ringThread.setDaemon(true);
        ringThread.setName("service-data-dts, admin JobScheduleHelper#ringThread");
        ringThread.start();
    }

    /**
     * Recomputes the job's next fire time from its cron expression, starting
     * after fromTime; disables the job (triggerStatus=0) when the cron yields
     * no future time.
     *
     * @throws ParseException when the stored cron expression is invalid
     */
    private void refreshNextValidTime(JobInfo jobInfo, Date fromTime) throws ParseException {
        Date nextValidTime = new CronExpression(jobInfo.getJobCron()).getNextValidTimeAfter(fromTime);
        if (nextValidTime != null) {
            jobInfo.setTriggerLastTime(jobInfo.getTriggerNextTime());
            jobInfo.setTriggerNextTime(nextValidTime.getTime());
        } else {
            jobInfo.setTriggerStatus(0);
            jobInfo.setTriggerLastTime(0);
            jobInfo.setTriggerNextTime(0);
        }
    }

    // Stages a job id on the given second-of-minute slot of the time ring.
    private void pushTimeRing(int ringSecond, int jobId) {
        // push async ring
        List<Integer> ringItemData = ringData.get(ringSecond);
        if (ringItemData == null) {
            ringItemData = new ArrayList<Integer>();
            ringData.put(ringSecond, ringItemData);
        }
        ringItemData.add(jobId);

        logger.debug(">>>>>>>>>>> service-data-dts, schedule push time-ring : " + ringSecond + " = " + Arrays.asList(ringItemData));
    }

    /**
     * Stops both daemons: the schedule thread first, then waits up to 8 extra
     * seconds when the ring still holds staged job ids, and finally stops the
     * ring thread.
     */
    public void toStop() {

        // 1、stop schedule
        scheduleThreadToStop = true;
        try {
            TimeUnit.SECONDS.sleep(1);  // wait
        } catch (InterruptedException e) {
            logger.error(e.getMessage(), e);
        }
        if (scheduleThread.getState() != Thread.State.TERMINATED) {
            // interrupt and wait
            scheduleThread.interrupt();
            try {
                scheduleThread.join();
            } catch (InterruptedException e) {
                logger.error(e.getMessage(), e);
            }
        }

        // if has ring data
        boolean hasRingData = false;
        if (!ringData.isEmpty()) {
            for (int second : ringData.keySet()) {
                List<Integer> tmpData = ringData.get(second);
                if (tmpData != null && tmpData.size() > 0) {
                    hasRingData = true;
                    break;
                }
            }
        }
        if (hasRingData) {
            // give the ring thread time to fire what is already staged
            try {
                TimeUnit.SECONDS.sleep(8);
            } catch (InterruptedException e) {
                logger.error(e.getMessage(), e);
            }
        }

        // stop ring (wait job-in-memory stop)
        ringThreadToStop = true;
        try {
            TimeUnit.SECONDS.sleep(1);
        } catch (InterruptedException e) {
            logger.error(e.getMessage(), e);
        }
        if (ringThread.getState() != Thread.State.TERMINATED) {
            // interrupt and wait
            ringThread.interrupt();
            try {
                ringThread.join();
            } catch (InterruptedException e) {
                logger.error(e.getMessage(), e);
            }
        }

        logger.info(">>>>>>>>>>> service-data-dts, JobScheduleHelper stop");
    }

}

View File

@@ -0,0 +1,274 @@
package com.platform.admin.core.thread;
import cn.hutool.core.io.FileUtil;
import cn.hutool.core.util.IdUtil;
import com.platform.admin.core.conf.ExcecutorConfig;
import com.platform.admin.core.conf.JobAdminConfig;
import com.platform.admin.core.trigger.JobTrigger;
import com.platform.admin.core.trigger.TriggerTypeEnum;
import com.platform.admin.entity.JobInfo;
import com.platform.admin.entity.JobLog;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.log.JobLogger;
import com.platform.core.util.Constants;
import com.platform.core.util.ProcessUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Job trigger thread-pool helper: routes trigger requests into a fast or a
 * slow ThreadPoolExecutor (jobs that are repeatedly slow in the current
 * minute are demoted to the slow pool), and hosts the FlinkX
 * process-launching helpers used by runJob(int).
 */
public class JobTriggerPoolHelper {
    private static Logger logger = LoggerFactory.getLogger(JobTriggerPoolHelper.class);

    // ---------------------- trigger pool ----------------------

    // fast/slow thread pool
    private ThreadPoolExecutor fastTriggerPool = null;
    private ThreadPoolExecutor slowTriggerPool = null;

    // Creates both pools: core 10 threads, max size from admin config,
    // 1000-slot (fast) / 2000-slot (slow) bounded queues.
    public void start() {
        fastTriggerPool = new ThreadPoolExecutor(
                10,
                JobAdminConfig.getAdminConfig().getTriggerPoolFastMax(),
                60L,
                TimeUnit.SECONDS,
                new LinkedBlockingQueue<Runnable>(1000),
                new ThreadFactory() {
                    @Override
                    public Thread newThread(Runnable r) {
                        return new Thread(r, "service-data-dts, admin JobTriggerPoolHelper-fastTriggerPool-" + r.hashCode());
                    }
                });

        slowTriggerPool = new ThreadPoolExecutor(
                10,
                JobAdminConfig.getAdminConfig().getTriggerPoolSlowMax(),
                60L,
                TimeUnit.SECONDS,
                new LinkedBlockingQueue<Runnable>(2000),
                new ThreadFactory() {
                    @Override
                    public Thread newThread(Runnable r) {
                        return new Thread(r, "service-data-dts, admin JobTriggerPoolHelper-slowTriggerPool-" + r.hashCode());
                    }
                });
    }

    // Immediate shutdown of both pools; queued triggers are discarded.
    public void stop() {
        //triggerPool.shutdown();
        fastTriggerPool.shutdownNow();
        slowTriggerPool.shutdownNow();
        logger.info(">>>>>>>>> service-data-dts trigger thread pool shutdown success.");
    }

    // job timeout count
    private volatile long minTim = System.currentTimeMillis() / 60000;      // ms > min
    // per-minute count of slow (>500ms) triggers per job id; cleared when the minute rolls over
    private volatile ConcurrentMap<Integer, AtomicInteger> jobTimeoutCountMap = new ConcurrentHashMap<>();

    /**
     * add trigger: submits the trigger task to the fast pool, or to the slow
     * pool when this job has exceeded the 500ms threshold more than 10 times
     * within the current minute.
     */
    public void addTrigger(final int jobId, final TriggerTypeEnum triggerType, final int failRetryCount, final String executorShardingParam, final String executorParam) {

        // choose thread pool
        ThreadPoolExecutor triggerPool_ = fastTriggerPool;
        AtomicInteger jobTimeoutCount = jobTimeoutCountMap.get(jobId);
        if (jobTimeoutCount != null && jobTimeoutCount.get() > 10) {      // job-timeout 10 times in 1 min
            triggerPool_ = slowTriggerPool;
        }

        // trigger
        triggerPool_.execute(() -> {
            long start = System.currentTimeMillis();
            try {
                // do trigger
                JobTrigger.trigger(jobId, triggerType, failRetryCount, executorShardingParam, executorParam);
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
            } finally {

                // check timeout-count-map: reset all counters when the minute changes
                long minTim_now = System.currentTimeMillis() / 60000;
                if (minTim != minTim_now) {
                    minTim = minTim_now;
                    jobTimeoutCountMap.clear();
                }

                // incr timeout-count-map
                long cost = System.currentTimeMillis() - start;
                if (cost > 500) {       // job-timeout threshold 500ms
                    AtomicInteger timeoutCount = jobTimeoutCountMap.putIfAbsent(jobId, new AtomicInteger(1));
                    if (timeoutCount != null) {
                        timeoutCount.incrementAndGet();
                    }
                }
            }
        });
    }

    // ---------------------- helper ----------------------

    private static JobTriggerPoolHelper helper = new JobTriggerPoolHelper();

    public static void toStart() {
        helper.start();
    }
    public static void toStop() {
        helper.stop();
    }

    /**
     * @param jobId
     * @param triggerType
     * @param failRetryCount        >=0: use this param
     *                              <0: use param from job info config
     * @param executorShardingParam
     * @param executorParam         null: use job param
     *                              not null: cover job param
     */
    public static void trigger(int jobId, TriggerTypeEnum triggerType, int failRetryCount, String executorShardingParam, String executorParam) {
        helper.addTrigger(jobId, triggerType, failRetryCount, executorShardingParam, executorParam);
    }

    /**
     * Builds the command-line tokens that launch the FlinkX script with the
     * generated job-json file, and makes sure the log directory exists.
     * NOTE(review): `timestamp` and `jobId` only feed the commented-out .out
     * redirect line below; presumably kept for a future re-enable — confirm.
     */
    public static String[] buildFlinkXExecutorCmd(String flinkXShPath, String tmpFilePath,int jobId) {
        long timestamp = System.currentTimeMillis();
        List<String> cmdArr = new ArrayList<>();
        if(JobTriggerPoolHelper.isWindows()) {
            cmdArr.add(Constants.CMDWINDOW);
            cmdArr.add(flinkXShPath);
            cmdArr.add(tmpFilePath);
        } else {
            cmdArr.add(Constants.CMDLINUX);
            cmdArr.add(flinkXShPath);
            cmdArr.add(tmpFilePath);
        }
        String logHome = ExcecutorConfig.getExcecutorConfig().getFlinkxlogHome();
        File folder = new File(logHome);
        if (!folder.exists() && !folder.isDirectory()) {
            folder.mkdirs();
        }
        // cmdArr.add(logHome+"/"+jobId+""+timestamp+".out");
        logger.info(cmdArr + " " + flinkXShPath + " " + tmpFilePath);
        return cmdArr.toArray(new String[cmdArr.size()]);
    }

    // True when the JVM reports a Windows OS name.
    public static boolean isWindows() {
        return System.getProperty("os.name").toLowerCase().contains("windows");
    }

    /**
     * Runs the FlinkX job synchronously: writes the job json to a temp file,
     * launches the external process, streams its stdout into a log file and
     * saves a JobLog row (with the process id and log path) when it exits.
     *
     * NOTE(review): Runtime.exec(String) tokenizes on whitespace and the
     * command is assembled by string concatenation — paths containing spaces
     * or untrusted job json could break the command or inject arguments;
     * prefer ProcessBuilder(List<String>). Confirm inputs are trusted.
     */
    public static void runJob(int jobId) {
        InputStreamReader isReader = null;
        BufferedReader bfReader = null;
        FileOutputStream out = null;
        try {
            JobInfo jobInfo = JobAdminConfig.getAdminConfig().getJobInfoMapper().loadById(jobId);
            String cmdstr = "";
            String tmpFilePath ="";
            String[] cmdarrayFinal = null;
            tmpFilePath = generateTemJsonFile(jobInfo.getJobJson());
            cmdarrayFinal = buildFlinkXExecutorCmd(ExcecutorConfig.getExcecutorConfig().getFlinkxHome(), tmpFilePath, jobId);
            // flatten the token array back into one shell string; ".log" tokens become a stdout redirect
            for (int j = 0; j < cmdarrayFinal.length; j++) {
                if (cmdarrayFinal[j].contains(".log")) {
                    cmdstr += " > " + cmdarrayFinal[j] ;
                }else {
                    cmdstr += cmdarrayFinal[j] + " ";
                }
            }
            // strip any launcher prefix before a "python" command, if present
            if(cmdstr.indexOf("python")>0){
                cmdstr = cmdstr.substring(cmdstr.indexOf("python"), cmdstr.length());
            }
            final Process process = Runtime.getRuntime().exec(cmdstr);
            String prcsId = ProcessUtil.getProcessId(process);
            JobLogger.log("Execute: " + cmdstr);
            JobLogger.log("process id: " + prcsId);
            // (jeff) fix for direct execution not taking effect: drain the
            // process stdout so the child cannot block on a full pipe
            isReader = new InputStreamReader(process.getInputStream(), "UTF-8");
            bfReader = new BufferedReader(isReader);
            String line = null;
            String logPath = ExcecutorConfig.getExcecutorConfig().getFlinkxlogHome()+"/"+jobId+""+System.currentTimeMillis()+".log";
            JobLogger.log("logPath: " + logPath);
            out = new FileOutputStream(logPath);
            // NOTE(review): stdout is decoded as UTF-8 but re-encoded via
            // getBytes() with the platform default charset — confirm intended
            while ((line = bfReader.readLine()) != null){
                logger.info(line);
                out.write(line.getBytes());
                String newLine = System.getProperty("line.separator");
                out.write(newLine.getBytes());
            }
            process.waitFor();
            // NOTE(review): temp-file deletion is commented out, so jobTmp-*.json
            // files accumulate in the json directory — confirm this is deliberate
            if (FileUtil.exist(tmpFilePath)) {
                // FileUtil.del(new File(tmpFilePath));
            }
            // record the run in job_log (trigger time truncated to whole seconds)
            Calendar calendar = Calendar.getInstance();
            calendar.setTime(new Date());
            calendar.set(Calendar.MILLISECOND, 0);
            Date triggerTime = calendar.getTime();
            JobLog jobLog = new JobLog();
            jobLog.setJobGroup(jobInfo.getJobGroup());
            jobLog.setJobId(jobInfo.getId());
            jobLog.setTriggerTime(triggerTime);
            jobLog.setJobDesc(jobInfo.getJobDesc());
            jobLog.setHandleTime(triggerTime);
            jobLog.setTriggerCode(ReturnT.SUCCESS_CODE);
            jobLog.setHandleCode(0);
            jobLog.setProcessId(prcsId);
            // store the job's log-file path in the executorAddress column
            jobLog.setExecutorAddress(logPath);
            JobAdminConfig.getAdminConfig().getJobLogMapper().save(jobLog);
        } catch (Exception e) {
            e.printStackTrace();
        }finally {
            // close stream chain outermost-first; each close failure is independent
            if(out != null){
                try {
                    out.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            if(bfReader != null){
                try {
                    bfReader.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            if(isReader != null){
                try {
                    isReader.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * Writes the job json to a uniquely named temp file under the configured
     * json directory and returns its path.
     */
    private static String generateTemJsonFile(String jobJson) {
        String jsonPath = "";
        jsonPath = ExcecutorConfig.getExcecutorConfig().getFlinkxjsonPath();
        if (!FileUtil.exist(jsonPath)) {
            FileUtil.mkdir(jsonPath);
        }
        String tmpFilePath = jsonPath + "jobTmp-" + IdUtil.simpleUUID() + ".json";
        // write the json to a temporary local file
        try (PrintWriter writer = new PrintWriter(tmpFilePath, "UTF-8")) {
            writer.println(jobJson);
        } catch (FileNotFoundException | UnsupportedEncodingException e) {
            JobLogger.log("JSON 临时文件写入异常:" + e.getMessage());
        }
        return tmpFilePath;
    }
}

View File

@@ -0,0 +1,258 @@
package com.platform.admin.core.trigger;
import com.platform.rpc.util.IpUtil;
import com.platform.admin.core.conf.JobAdminConfig;
import com.platform.admin.core.route.ExecutorRouteStrategyEnum;
import com.platform.admin.core.scheduler.JobScheduler;
import com.platform.admin.core.util.I18nUtil;
import com.platform.admin.entity.JobDatasource;
import com.platform.admin.entity.JobGroup;
import com.platform.admin.entity.JobInfo;
import com.platform.admin.entity.JobLog;
import com.platform.admin.tool.query.BaseQueryTool;
import com.platform.admin.tool.query.QueryToolFactory;
import com.platform.admin.util.JSONUtils;
import com.platform.core.biz.ExecutorBiz;
import com.platform.core.biz.impl.ExecutorBizImpl;
import com.platform.core.biz.model.ReturnT;
import com.platform.core.biz.model.TriggerParam;
import com.platform.core.enums.ExecutorBlockStrategyEnum;
import com.platform.core.enums.IncrementTypeEnum;
import com.platform.core.glue.GlueTypeEnum;
import io.netty.util.internal.ThrowableUtil;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Calendar;
import java.util.Date;
public class JobTrigger {
private static Logger logger = LoggerFactory.getLogger(JobTrigger.class);
/**
* trigger job
*
* @param jobId
* @param triggerType
* @param failRetrypublic class JobTriggerPoolHelper {Count >=0: use this param
* <0: use param from job info config
* @param executorShardingParam
* @param executorParam null: use job param
* not null: cover job param
*/
public static void trigger(int jobId, TriggerTypeEnum triggerType, int failRetryCount, String executorShardingParam, String executorParam) {
JobInfo jobInfo = JobAdminConfig.getAdminConfig().getJobInfoMapper().loadById(jobId);
if (jobInfo == null) {
logger.warn(">>>>>>>>>>>> trigger fail, jobId invalidjobId={}", jobId);
return;
}
if (GlueTypeEnum.BEAN.getDesc().equals(jobInfo.getGlueType())) {
//解密账密
String json = JSONUtils.changeJson(jobInfo.getJobJson(), JSONUtils.decrypt);
jobInfo.setJobJson(json);
}
if (StringUtils.isNotBlank(executorParam)) {
jobInfo.setExecutorParam(executorParam);
}
int finalFailRetryCount = failRetryCount >= 0 ? failRetryCount : jobInfo.getExecutorFailRetryCount();
JobGroup group = JobAdminConfig.getAdminConfig().getJobGroupMapper().load(jobInfo.getJobGroup());
// sharding param
int[] shardingParam = null;
if (executorShardingParam != null) {
String[] shardingArr = executorShardingParam.split("/");
if (shardingArr.length == 2 && isNumeric(shardingArr[0]) && isNumeric(shardingArr[1])) {
shardingParam = new int[2];
shardingParam[0] = Integer.valueOf(shardingArr[0]);
shardingParam[1] = Integer.valueOf(shardingArr[1]);
}
}
if (ExecutorRouteStrategyEnum.SHARDING_BROADCAST == ExecutorRouteStrategyEnum.match(jobInfo.getExecutorRouteStrategy(), null)
&& group.getRegistryList() != null && !group.getRegistryList().isEmpty()
&& shardingParam == null) {
logger.info("多任务processTrigger开始...");
for (int i = 0; i < group.getRegistryList().size(); i++) {
processTrigger(group, jobInfo, finalFailRetryCount, triggerType, i, group.getRegistryList().size());
}
} else {
if (shardingParam == null) {
shardingParam = new int[]{0, 1};
}
logger.info("单任务processTrigger开始...");
processTrigger(group, jobInfo, finalFailRetryCount, triggerType, shardingParam[0], shardingParam[1]);
}
}
private static boolean isNumeric(String str) {
try {
int result = Integer.valueOf(str);
return true;
} catch (NumberFormatException e) {
return false;
}
}
/**
* @param group job group, registry list may be empty
* @param jobInfo
* @param finalFailRetryCount
* @param triggerType
* @param index sharding index
* @param total sharding index
*/
private static void processTrigger(JobGroup group, JobInfo jobInfo, int finalFailRetryCount, TriggerTypeEnum triggerType, int index, int total) {
TriggerParam triggerParam = new TriggerParam();
// param
ExecutorBlockStrategyEnum blockStrategy = ExecutorBlockStrategyEnum
.match(jobInfo.getExecutorBlockStrategy(), ExecutorBlockStrategyEnum.SERIAL_EXECUTION); // block strategy
ExecutorRouteStrategyEnum executorRouteStrategyEnum = ExecutorRouteStrategyEnum.match(jobInfo.getExecutorRouteStrategy(), null); // route strategy
String shardingParam = (ExecutorRouteStrategyEnum.SHARDING_BROADCAST == executorRouteStrategyEnum) ? String.valueOf(index).concat("/").concat(String.valueOf(total)) : null;
// 1、save log-id
Calendar calendar = Calendar.getInstance();
calendar.setTime(new Date());
calendar.set(Calendar.MILLISECOND, 0);
Date triggerTime = calendar.getTime();
JobLog jobLog = new JobLog();
jobLog.setJobGroup(jobInfo.getJobGroup());
jobLog.setJobId(jobInfo.getId());
jobLog.setTriggerTime(triggerTime);
jobLog.setJobDesc(jobInfo.getJobDesc());
long saveCount = JobAdminConfig.getAdminConfig().getJobLogMapper().save(jobLog);
logger.info(">>>>>>>>>>> service-data-dts trigger start, jobId:{}", jobLog.getId());
logger.info(">>>>>>>>>>> service-data-dts trigger start, saveCount:{}", saveCount);
// 2、init trigger-param
triggerParam.setJobId(jobInfo.getId());
triggerParam.setExecutorHandler(jobInfo.getExecutorHandler());
triggerParam.setExecutorParams(jobInfo.getExecutorParam());
triggerParam.setExecutorBlockStrategy(jobInfo.getExecutorBlockStrategy());
triggerParam.setExecutorTimeout(jobInfo.getExecutorTimeout());
triggerParam.setLogId(jobLog.getId());
triggerParam.setLogDateTime(jobLog.getTriggerTime().getTime());
triggerParam.setGlueType(jobInfo.getGlueType());
triggerParam.setGlueSource(jobInfo.getGlueSource());
triggerParam.setGlueUpdatetime(jobInfo.getGlueUpdatetime().getTime());
triggerParam.setBroadcastIndex(index);
triggerParam.setBroadcastTotal(total);
triggerParam.setJobJson(jobInfo.getJobJson());
//increment parameter
Integer incrementType = jobInfo.getIncrementType();
if (incrementType != null) {
triggerParam.setIncrementType(incrementType);
if (IncrementTypeEnum.ID.getCode() == incrementType) {
long maxId = getMaxId(jobInfo);
jobLog.setMaxId(maxId);
triggerParam.setEndId(maxId);
triggerParam.setStartId(jobInfo.getIncStartId());
} else if (IncrementTypeEnum.TIME.getCode() == incrementType) {
triggerParam.setStartTime(jobInfo.getIncStartTime());
triggerParam.setTriggerTime(triggerTime);
triggerParam.setReplaceParamType(jobInfo.getReplaceParamType());
} else if (IncrementTypeEnum.PARTITION.getCode() == incrementType) {
triggerParam.setPartitionInfo(jobInfo.getPartitionInfo());
}
triggerParam.setReplaceParam(jobInfo.getReplaceParam());
}
//jvm parameter
triggerParam.setJvmParam(jobInfo.getJvmParam());
// 3、init address
String address = null;
ReturnT<String> routeAddressResult = null;
if (group.getRegistryList() != null && !group.getRegistryList().isEmpty()) {
if (ExecutorRouteStrategyEnum.SHARDING_BROADCAST == executorRouteStrategyEnum) {
if (index < group.getRegistryList().size()) {
address = group.getRegistryList().get(index);
} else {
address = group.getRegistryList().get(0);
}
} else {
routeAddressResult = executorRouteStrategyEnum.getRouter().route(triggerParam, group.getRegistryList());
if (routeAddressResult.getCode() == ReturnT.SUCCESS_CODE) {
address = routeAddressResult.getContent();
}
}
} else {
routeAddressResult = new ReturnT<String>(ReturnT.FAIL_CODE, I18nUtil.getString("jobconf_trigger_address_empty"));
}
// 4、trigger remote executor
ReturnT<String> triggerResult = null;
if (address != null) {
triggerResult = runExecutor(triggerParam, address);
} else {
triggerResult = new ReturnT<String>(ReturnT.FAIL_CODE, null);
}
// 5、collection trigger info
StringBuffer triggerMsgSb = new StringBuffer();
triggerMsgSb.append(I18nUtil.getString("jobconf_trigger_type")).append("").append(triggerType.getTitle());
triggerMsgSb.append("<br>").append(I18nUtil.getString("jobconf_trigger_admin_adress")).append("").append(
IpUtil.getIp());
triggerMsgSb.append("<br>").append(I18nUtil.getString("jobconf_trigger_exe_regtype")).append("")
.append((group.getAddressType() == 0) ? I18nUtil.getString("jobgroup_field_addressType_0") : I18nUtil.getString("jobgroup_field_addressType_1"));
triggerMsgSb.append("<br>").append(I18nUtil.getString("jobconf_trigger_exe_regaddress")).append("").append(group.getRegistryList());
triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_executorRouteStrategy")).append("").append(executorRouteStrategyEnum.getTitle());
if (shardingParam != null) {
triggerMsgSb.append("(" + shardingParam + ")");
}
triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_executorBlockStrategy")).append("").append(blockStrategy.getTitle());
triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_timeout")).append("").append(jobInfo.getExecutorTimeout());
triggerMsgSb.append("<br>").append(I18nUtil.getString("jobinfo_field_executorFailRetryCount")).append("").append(finalFailRetryCount);
triggerMsgSb.append("<br><br><span style=\"color:#00c0ef;\" > >>>>>>>>>>>" + I18nUtil.getString("jobconf_trigger_run") + "<<<<<<<<<<< </span><br>")
.append((routeAddressResult != null && routeAddressResult.getMsg() != null) ? routeAddressResult.getMsg() + "<br><br>" : "").append((triggerResult != null && triggerResult.getMsg() != null) ? triggerResult.getMsg() : "");
// 6、save log trigger-info
jobLog.setExecutorAddress(address);
jobLog.setExecutorHandler(jobInfo.getExecutorHandler());
jobLog.setExecutorParam(jobInfo.getExecutorParam());
jobLog.setExecutorShardingParam(shardingParam);
jobLog.setExecutorFailRetryCount(finalFailRetryCount);
jobLog.setTriggerCode(triggerResult.getCode());
jobLog.setTriggerMsg(triggerMsgSb.toString());
int uodateCount = JobAdminConfig.getAdminConfig().getJobLogMapper().updateTriggerInfo(jobLog);
logger.info(">>>>>>>>>>> service-data-dts trigger end, jobId:{}", jobLog.getId());
logger.info(">>>>>>>>>>> service-data-dts trigger end, uodateCount:{}", uodateCount);
}
private static long getMaxId(JobInfo jobInfo) {
JobDatasource datasource = JobAdminConfig.getAdminConfig().getJobDatasourceMapper().selectById(jobInfo.getDatasourceId());
BaseQueryTool qTool = QueryToolFactory.getByDbType(datasource);
return qTool.getMaxIdVal(jobInfo.getReaderTable(), jobInfo.getPrimaryKey());
}
/**
* run executor
*
* @param triggerParam
* @param address
* @return
*/
public static ReturnT<String> runExecutor(TriggerParam triggerParam, String address) {
ReturnT<String> runResult = null;
try {
// 进行任务的触发
ExecutorBiz executorBiz = new ExecutorBizImpl();
runResult = executorBiz.run(triggerParam);
} catch (Exception e) {
logger.error(">>>>>>>>>>> service-data-dts trigger error, please check if the executor[{}] is running.", address, e);
runResult = new ReturnT<String>(ReturnT.FAIL_CODE, ThrowableUtil.stackTraceToString(e));
}
StringBuffer runResultSB = new StringBuffer(I18nUtil.getString("jobconf_trigger_run") + "");
runResultSB.append("<br>address").append(address);
runResultSB.append("<br>code").append(runResult.getCode());
runResultSB.append("<br>msg").append(runResult.getMsg());
runResult.setMsg(runResultSB.toString());
return runResult;
}
}

View File

@@ -0,0 +1,22 @@
package com.platform.admin.core.trigger;
import com.platform.admin.core.util.I18nUtil;
/**
 * Enumerates the ways a job trigger can originate; each constant carries a
 * localized, human-readable title resolved through {@link I18nUtil}.
 */
public enum TriggerTypeEnum {

    MANUAL(I18nUtil.getString("jobconf_trigger_type_manual")),
    CRON(I18nUtil.getString("jobconf_trigger_type_cron")),
    RETRY(I18nUtil.getString("jobconf_trigger_type_retry")),
    PARENT(I18nUtil.getString("jobconf_trigger_type_parent")),
    API(I18nUtil.getString("jobconf_trigger_type_api"));

    /** Localized display title, fixed at constant-initialization time. */
    private final String title;

    TriggerTypeEnum(String title) {
        this.title = title;
    }

    /** @return the localized display title of this trigger type */
    public String getTitle() {
        return title;
    }
}

View File

@@ -0,0 +1,75 @@
package com.platform.admin.core.util;
import com.platform.admin.core.conf.JobAdminConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.EncodedResource;
import org.springframework.core.io.support.PropertiesLoaderUtils;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
 * Loads and serves the admin console's i18n message bundle
 * ({@code i18n/message[_<lang>].properties} on the classpath).
 */
public class I18nUtil {

    private static final Logger logger = LoggerFactory.getLogger(I18nUtil.class);

    // Lazily-loaded bundle. volatile so a fully-loaded Properties is visible
    // across threads; the worst-case race merely loads the file twice.
    private static volatile Properties prop = null;

    /**
     * Load (once) and return the i18n properties bundle.
     *
     * @return the loaded bundle, or null if the file could not be read
     *         (the IOException is logged)
     */
    public static Properties loadI18nProp() {
        if (prop != null) {
            return prop;
        }
        try {
            // build i18n prop
            String i18n = JobAdminConfig.getAdminConfig().getI18n();
            // Bug fix: a null language previously produced "messagenull.properties"
            // (MessageFormat renders a null argument as the string "null").
            // Null/blank now falls back to the default "message.properties".
            i18n = (i18n != null && i18n.trim().length() > 0) ? ("_" + i18n.trim()) : "";
            String i18nFile = MessageFormat.format("i18n/message{0}.properties", i18n);
            // load prop
            Resource resource = new ClassPathResource(i18nFile);
            EncodedResource encodedResource = new EncodedResource(resource, "UTF-8");
            prop = PropertiesLoaderUtils.loadProperties(encodedResource);
        } catch (IOException e) {
            logger.error(e.getMessage(), e);
        }
        return prop;
    }

    /**
     * get val of i18n key
     *
     * @param key message key
     * @return the localized value, or null when the bundle failed to load
     *         or the key is absent
     */
    public static String getString(String key) {
        Properties properties = loadI18nProp();
        // Guard: loadI18nProp returns null when the bundle file is unreadable.
        return properties != null ? properties.getProperty(key) : null;
    }

    /**
     * get mult val of i18n mult key, as json
     *
     * @param keys keys to look up; empty/null means "all keys in the bundle"
     * @return JSON object string mapping each key to its localized value
     */
    public static String getMultString(String... keys) {
        Map<String, String> map = new HashMap<String, String>();
        Properties properties = loadI18nProp();
        if (keys != null && keys.length > 0) {
            for (String key : keys) {
                map.put(key, properties.getProperty(key));
            }
        } else {
            for (String key : properties.stringPropertyNames()) {
                map.put(key, properties.getProperty(key));
            }
        }
        String json = JacksonUtil.writeValueAsString(map);
        return json;
    }
}

View File

@@ -0,0 +1,85 @@
package com.platform.admin.core.util;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
/**
 * Thin Jackson wrapper: serialize/deserialize with a shared ObjectMapper,
 * logging failures and returning null instead of throwing.
 */
public class JacksonUtil {

    private static final Logger logger = LoggerFactory.getLogger(JacksonUtil.class);

    // Shared mapper; ObjectMapper is thread-safe once configured.
    private static final ObjectMapper objectMapper = new ObjectMapper();

    /** @return the shared ObjectMapper instance */
    public static ObjectMapper getInstance() {
        return objectMapper;
    }

    /**
     * bean、array、List、Map --> json
     *
     * @param obj value to serialize
     * @return json string, or null when serialization fails (error is logged)
     */
    public static String writeValueAsString(Object obj) {
        try {
            return getInstance().writeValueAsString(obj);
        } catch (IOException e) {
            // JsonGenerationException and JsonMappingException both extend
            // IOException (via JsonProcessingException), so this single catch
            // covers everything the previous three-way catch chain did.
            logger.error(e.getMessage(), e);
        }
        return null;
    }

    /**
     * string --> bean、Map、List(array)
     *
     * @param jsonStr JSON input
     * @param clazz   target type
     * @return deserialized object, or null when parsing fails (error is logged)
     */
    public static <T> T readValue(String jsonStr, Class<T> clazz) {
        try {
            return getInstance().readValue(jsonStr, clazz);
        } catch (IOException e) {
            // Covers JsonParseException / JsonMappingException as well (see above).
            logger.error(e.getMessage(), e);
        }
        return null;
    }

    /**
     * string --> List&lt;Bean&gt;...
     *
     * @param jsonStr          JSON input
     * @param parametrized     generic container type (e.g. List.class)
     * @param parameterClasses type parameters (e.g. the element class)
     * @return deserialized object, or null when parsing fails (error is logged)
     */
    public static <T> T readValue(String jsonStr, Class<?> parametrized, Class<?>... parameterClasses) {
        try {
            JavaType javaType = getInstance().getTypeFactory().constructParametricType(parametrized, parameterClasses);
            return getInstance().readValue(jsonStr, javaType);
        } catch (IOException e) {
            // Covers JsonParseException / JsonMappingException as well (see above).
            logger.error(e.getMessage(), e);
        }
        return null;
    }
}

View File

@@ -0,0 +1,129 @@
package com.platform.admin.core.util;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
 * Minimal static in-memory cache with absolute per-entry expiry.
 * Thread-safe: backed by a ConcurrentHashMap; expired entries are purged
 * lazily on read and before every write.
 */
public class LocalCacheUtil {

    // Keyed by cache key; values carry their absolute expiry timestamp.
    private static ConcurrentMap<String, LocalCacheData> cacheRepository = new ConcurrentHashMap<String, LocalCacheData>();

    /** Immutable holder pairing a cached value with its absolute expiry time. */
    private static class LocalCacheData {
        private final String key;
        private final Object val;
        // Absolute epoch millis after which the entry is considered stale.
        private final long timeoutTime;

        public LocalCacheData(String key, Object val, long timeoutTime) {
            this.key = key;
            this.val = val;
            this.timeoutTime = timeoutTime;
        }

        public String getKey() {
            return key;
        }

        public Object getVal() {
            return val;
        }

        public long getTimeoutTime() {
            return timeoutTime;
        }
    }

    /**
     * Cache a value for {@code cacheTime} milliseconds.
     *
     * @param key       cache key; null/blank keys are rejected
     * @param val       value to cache; null evicts the key instead of storing
     * @param cacheTime time-to-live in ms; non-positive evicts the key instead
     * @return true when the value was stored, false otherwise
     */
    public static boolean set(String key, Object val, long cacheTime) {
        // clean timeout cache, before set new cache (avoid cache too much)
        cleanTimeoutCache();
        // set new cache
        if (key == null || key.trim().length() == 0) {
            return false;
        }
        if (val == null) {
            // Bug fix: previously fell through and cached a null value.
            remove(key);
            return false;
        }
        if (cacheTime <= 0) {
            // Bug fix: previously fell through and inserted an already-expired entry.
            remove(key);
            return false;
        }
        long timeoutTime = System.currentTimeMillis() + cacheTime;
        LocalCacheData localCacheData = new LocalCacheData(key, val, timeoutTime);
        cacheRepository.put(localCacheData.getKey(), localCacheData);
        return true;
    }

    /**
     * Remove a cache entry.
     *
     * @param key cache key
     * @return false for null/blank keys, true otherwise (even when absent)
     */
    public static boolean remove(String key) {
        if (key == null || key.trim().length() == 0) {
            return false;
        }
        cacheRepository.remove(key);
        return true;
    }

    /**
     * Fetch a live cache entry; an expired entry is removed and yields null.
     *
     * @param key cache key
     * @return cached value, or null when the key is null/blank, absent, or expired
     */
    public static Object get(String key) {
        if (key == null || key.trim().length() == 0) {
            return null;
        }
        LocalCacheData localCacheData = cacheRepository.get(key);
        if (localCacheData != null && System.currentTimeMillis() < localCacheData.getTimeoutTime()) {
            return localCacheData.getVal();
        } else {
            remove(key);
            return null;
        }
    }

    /**
     * Drop every entry whose expiry time has passed.
     * Safe while iterating: ConcurrentHashMap tolerates concurrent removal.
     *
     * @return always true
     */
    public static boolean cleanTimeoutCache() {
        if (!cacheRepository.keySet().isEmpty()) {
            for (String key : cacheRepository.keySet()) {
                LocalCacheData localCacheData = cacheRepository.get(key);
                if (localCacheData != null && System.currentTimeMillis() >= localCacheData.getTimeoutTime()) {
                    cacheRepository.remove(key);
                }
            }
        }
        return true;
    }
}

View File

@@ -0,0 +1,15 @@
package com.platform.admin.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * DTO for ClickHouse reader-specific options in a FlinkX job build request.
 * Currently a placeholder with no fields of its own.
 *
 * @author AllDataDC
 * @date 2022/9/29
 */
@Data
public class ClickhouseReaderDto implements Serializable {

    // Explicit UID so future field additions stay serialization-compatible.
    private static final long serialVersionUID = 1L;
}

View File

@@ -0,0 +1,15 @@
package com.platform.admin.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * DTO for ClickHouse writer-specific options in a FlinkX job build request.
 * Currently a placeholder with no fields of its own.
 *
 * @author AllDataDC
 * @date 2022/9/29
 */
@Data
public class ClickhouseWriterDto implements Serializable {

    // Explicit UID so future field additions stay serialization-compatible.
    private static final long serialVersionUID = 1L;
}

View File

@@ -0,0 +1,32 @@
package com.platform.admin.dto;
import lombok.Data;
import java.io.Serializable;
import java.util.List;
/**
 * Request DTO for batch-building FlinkX job JSON. Carries the reader/writer
 * datasource ids and table lists plus RDBMS-specific options.
 *
 * @author AllDataDC
 * @date 2022/05/05 17:15
 */
@Data
public class FlinkXBatchJsonBuildDto implements Serializable {

    // Explicit UID so future field additions stay serialization-compatible.
    private static final long serialVersionUID = 1L;

    /** Datasource id on the reader side. */
    private Long readerDatasourceId;
    /** Tables to read from. */
    private List<String> readerTables;
    /** Datasource id on the writer side. */
    private Long writerDatasourceId;
    /** Tables to write to. */
    private List<String> writerTables;
    // Template id — presumably references a base job template; confirm with the builder service.
    private int templateId;
    /** RDBMS reader options. */
    private RdbmsReaderDto rdbmsReader;
    /** RDBMS writer options. */
    private RdbmsWriterDto rdbmsWriter;
}

View File

@@ -0,0 +1,50 @@
package com.platform.admin.dto;
import lombok.Data;
import java.io.Serializable;
import java.util.List;
/**
 * Request DTO for building a single FlinkX job JSON. Carries reader/writer
 * datasource ids, table and column lists, plus type-specific option DTOs.
 *
 * @author AllDataDC
 * @date 2022/03/14 07:15
 */
@Data
public class FlinkXJsonBuildDto implements Serializable {

    // Explicit UID so future field additions stay serialization-compatible.
    private static final long serialVersionUID = 1L;

    /** Datasource id on the reader side. */
    private Long readerDatasourceId;
    /** Tables to read from. */
    private List<String> readerTables;
    /** Columns to read. */
    private List<String> readerColumns;
    /** Datasource id on the writer side. */
    private Long writerDatasourceId;
    /** Tables to write to. */
    private List<String> writerTables;
    /** Columns to write. */
    private List<String> writerColumns;

    // Type-specific reader/writer options — presumably only the pair matching
    // the selected datasource types is populated; confirm with the builder service.
    private HiveReaderDto hiveReader;
    private HiveWriterDto hiveWriter;
    private HbaseReaderDto hbaseReader;
    private HbaseWriterDto hbaseWriter;
    private RdbmsReaderDto rdbmsReader;
    private RdbmsWriterDto rdbmsWriter;
    private MongoDBReaderDto mongoDBReader;
    private MongoDBWriterDto mongoDBWriter;
    private ClickhouseReaderDto clickhouseReader;
    private ClickhouseWriterDto clickhouseWriter;
}

View File

@@ -0,0 +1,17 @@
package com.platform.admin.dto;
import lombok.Data;
import java.io.Serializable;
/**
 * DTO for HBase reader-specific options in a FlinkX job build request.
 */
@Data
public class HbaseReaderDto implements Serializable {

    // Explicit UID so future field additions stay serialization-compatible.
    private static final long serialVersionUID = 1L;

    // Values arrive as strings; semantics inferred from the field names —
    // confirm against the JSON builder:
    private String readerMaxVersion;   // presumably the HBase max-versions setting
    private String readerMode;         // presumably the read mode selector
    private Range readerRange;         // presumably the row-key range to scan
}

Some files were not shown because too many files have changed in this diff Show More