Version 2.0 update
ddp-runner-api/pom.xml (new file, 135 lines)
@@ -0,0 +1,135 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>ddp</artifactId>
        <groupId>com.fibo.ddp</groupId>
        <version>1.0-SNAPSHOT</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <artifactId>ddp-runner-api</artifactId>
    <packaging>jar</packaging>

    <dependencies>
        <!-- Data center -->
        <dependency>
            <groupId>com.fibo.ddp</groupId>
            <artifactId>ddp-datax-realtime-field</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <!-- Strategy center -->
        <dependency>
            <groupId>com.fibo.ddp</groupId>
            <artifactId>ddp-strategyx-guide-rule</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>com.fibo.ddp</groupId>
            <artifactId>ddp-strategyx-script-rule</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>com.fibo.ddp</groupId>
            <artifactId>ddp-strategyx-score-card</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>com.fibo.ddp</groupId>
            <artifactId>ddp-strategyx-list-library</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>com.fibo.ddp</groupId>
            <artifactId>ddp-strategyx-ai-model</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>com.fibo.ddp</groupId>
            <artifactId>ddp-strategyx-decision-table</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>com.fibo.ddp</groupId>
            <artifactId>ddp-strategyx-decision-tree</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>com.fibo.ddp</groupId>
            <artifactId>ddp-strategyx-collection-rule</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>com.fibo.ddp</groupId>
            <artifactId>ddp-strategyx-tag</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>com.fibo.ddp</groupId>
            <artifactId>ddp-strategyx-data-clean</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <!-- Scenario engines -->
        <dependency>
            <groupId>com.fibo.ddp</groupId>
            <artifactId>ddp-enginex-risk-engine</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>com.fibo.ddp</groupId>
            <artifactId>ddp-enginex-dataflow-engine</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>com.fibo.ddp</groupId>
            <artifactId>ddp-enginex-personas-engine</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>com.fibo.ddp</groupId>
            <artifactId>ddp-enginex-runner-node</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <!-- Permission system -->
        <dependency>
            <groupId>com.fibo.ddp</groupId>
            <artifactId>ddp-authx</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <!-- Monitoring center -->
        <dependency>
            <groupId>com.fibo.ddp</groupId>
            <artifactId>ddp-monitor</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <!-- Analysis center -->
        <dependency>
            <groupId>com.fibo.ddp</groupId>
            <artifactId>ddp-analysis</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
    </dependencies>

    <build>
        <resources>
            <resource>
                <directory>src/main/java</directory>
                <!-- This entry is required; without it the MyBatis Mapper.xml files under src/main/java would be left out of the build -->
                <includes>
                    <include>**/*.xml</include>
                </includes>
            </resource>
            <!-- Location of the resource files -->
            <resource>
                <directory>src/main/resources</directory>
            </resource>
        </resources>

        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>
@@ -0,0 +1,21 @@
package com.fibo.ddp.runner.api;

import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.transaction.annotation.EnableTransactionManagement;

@SpringBootApplication
@EnableTransactionManagement
@MapperScan({"com.fibo.ddp.common.dao.**"})
@ComponentScan(basePackages = "com.fibo.ddp.**")
@EnableAsync
public class JarDdpRunnerApiApplication {

    public static void main(String[] args) {
        SpringApplication.run(JarDdpRunnerApiApplication.class, args);
    }

}
@@ -0,0 +1,72 @@
package com.fibo.ddp.runner.api.config;

import lombok.Data;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;

@Configuration
@Data
public class ConfigHolder {

    // Redis config
    @Value("${redis.host}")
    private String redisHost;
    @Value("${redis.port}")
    private int redisPort;
    @Value("${redis.db}")
    private int redisDb;
    @Value("${redis.password}")
    private String redisPwd;
    @Value("${redis.pool.maxTotal}")
    private int redisMaxTotal;
    @Value("${redis.pool.maxIdle}")
    private int redisMaxIdle;
    @Value("${redis.pool.maxWait}")
    private int redisMaxWait;
    @Value("${redis.pool.timeout}")
    private int redisTimeout;

    // Whether business logic uses the cache
    @Value("${switch.use.cache}")
    private String cacheSwitch;
    // Whether canal cache synchronization is enabled
    @Value("${switch.canal.cache}")
    private String canalCacheSwitch;
    // canal host address
    @Value("${canal.hostname}")
    private String canalHostName;
    // canal port
    @Value("${canal.port}")
    private int canalPort;

    // JDBC config
    /*@Value("${jdbc.url}")
    private String jdbcUrl;
    @Value("${jdbc.driver}")
    private String DriverName;
    @Value("${pool.maxPoolSize}")
    private int maxPoolSize;
    @Value("${jdbc.username}")
    private String jdbcUserName;
    @Value("${jdbc.password}")
    private String jdbcPwd;
    @Value("${pool.maxWait}")
    private int jdbcMaxWait;
    @Value("${pool.timeBetweenEvictionRunsMillis}")
    private int timeBetweenEvictionRunsMillis;
    @Value("${pool.minEvictableIdleTimeMillis}")
    private int minEvictableIdleTimeMillis;
    @Value("${pool.validationQuery}")
    private String validationQuery;

    // RabbitMQ config
    @Value("${rabbitMQ.host}")
    private String rabbitHost;
    @Value("${rabbitMQ.port}")
    private int rabbitPort;
    @Value("${rabbitMQ.username}")
    private String rabbitUsername;
    @Value("${rabbitMQ.password}")
    private String rabbitPassword;*/

}
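Editorial aside, not part of the commit: the same redis.* keys could also be bound as one type-safe group with @ConfigurationProperties, the approach DataSourceConfig below already uses for the spring.datasource.druid prefix. The class below is a hypothetical sketch, not project code.

import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;

@Configuration
@ConfigurationProperties(prefix = "redis")
public class RedisPropertiesSketch {

    // Bound from redis.host, redis.port, redis.db and redis.password in application-dev.properties
    private String host;
    private int port;
    private int db;
    private String password;

    public String getHost() { return host; }
    public void setHost(String host) { this.host = host; }
    public int getPort() { return port; }
    public void setPort(int port) { this.port = port; }
    public int getDb() { return db; }
    public void setDb(int db) { this.db = db; }
    public String getPassword() { return password; }
    public void setPassword(String password) { this.password = password; }
}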
@@ -0,0 +1,46 @@
package com.fibo.ddp.runner.api.config;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;

import javax.annotation.Resource;
import java.util.concurrent.ThreadPoolExecutor;

@Configuration
public class ConfigurationContainor {

    @Resource
    private ConfigHolder configHolder;

    @Bean(name = "threadPoolTaskExecutor")
    ThreadPoolTaskExecutor threadPoolTaskExecutor() {
        ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
        executor.setCorePoolSize(2000);
        executor.setMaxPoolSize(10000);
        executor.setQueueCapacity(100000);
        executor.setRejectedExecutionHandler(new ThreadPoolExecutor.AbortPolicy());
        return executor;
    }

    @Bean(name = "jedisPool")
    public JedisPool jedisPool() {
        JedisPoolConfig config = new JedisPoolConfig();
        config.setMaxTotal(configHolder.getRedisMaxTotal());
        config.setMaxIdle(configHolder.getRedisMaxIdle());
        config.setMaxWaitMillis(configHolder.getRedisMaxWait());
        config.setTestOnBorrow(true);
        // config.setTestOnReturn(true);

        JedisPool pool = new JedisPool(config,
                configHolder.getRedisHost(),
                configHolder.getRedisPort(),
                configHolder.getRedisTimeout(),
                configHolder.getRedisPwd(),
                configHolder.getRedisDb());
        return pool;
    }

}
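Not part of the commit: a minimal usage sketch (class and method names are illustrative) of borrowing a connection from the jedisPool bean defined above, assuming a Jedis 3.x client where the handle returned by getResource() goes back to the pool when closed.

import javax.annotation.Resource;
import org.springframework.stereotype.Service;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;

@Service
public class CacheClientSketch {

    @Resource
    private JedisPool jedisPool;

    public String read(String key) {
        // try-with-resources hands the connection back to the pool on close
        try (Jedis jedis = jedisPool.getResource()) {
            return jedis.get(key);
        }
    }
}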
@@ -0,0 +1,69 @@
package com.fibo.ddp.runner.api.config;

import com.alibaba.druid.pool.DruidDataSource;
import com.baomidou.mybatisplus.extension.spring.MybatisSqlSessionFactoryBean;
import com.fibo.ddp.common.service.datax.runner.mysql.DynamicDataSource;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionTemplate;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.io.support.ResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;

import javax.sql.DataSource;
import java.util.HashMap;
import java.util.Map;

@Configuration
public class DataSourceConfig {

    // Bind every property under the spring.datasource.druid prefix onto the DataSource
    @ConfigurationProperties(prefix = "spring.datasource.druid")
    @Bean
    public DruidDataSource druidDataSource() {
        return new DruidDataSource();
    }

    @Bean
    public DynamicDataSource dynamicDataSource(@Qualifier("druidDataSource") DruidDataSource defaultDataSource) {
        DynamicDataSource dynamicDataSource = DynamicDataSource.getInstance();
        Map<Object, Object> map = new HashMap<>();
        map.put("default", defaultDataSource);
        dynamicDataSource.setTargetDataSources(map);
        dynamicDataSource.setDefaultTargetDataSource(defaultDataSource);
        return dynamicDataSource;
    }

    @Bean(name = "dataSourceTransactionManager")
    public DataSourceTransactionManager dataSourceTransactionManager(@Qualifier("druidDataSource") DruidDataSource defaultDataSource) {
        DataSourceTransactionManager dm = new DataSourceTransactionManager();
        dm.setDataSource(defaultDataSource);
        return dm;
    }

    @Bean
    public SqlSessionFactory sqlSessionFactory(
            @Qualifier("dynamicDataSource") DataSource dynamicDataSource)
            throws Exception {
        MybatisSqlSessionFactoryBean bean = new MybatisSqlSessionFactoryBean();
        ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
        Resource[] mapperXmlResource = resolver.getResources("classpath*:**/*Mapper.xml");
        bean.setDataSource(dynamicDataSource);
        bean.setMapperLocations(mapperXmlResource);
        bean.setTypeAliasesPackage("com.fibo.ddp.common.model");
        bean.getObject().getConfiguration().setMapUnderscoreToCamelCase(true);
        bean.getObject().getConfiguration().setCacheEnabled(false);
        return bean.getObject();
    }

    @Bean(name = "sqlSessionTemplate")
    public SqlSessionTemplate sqlSessionTemplate(
            @Qualifier("sqlSessionFactory") SqlSessionFactory sqlSessionFactory)
            throws Exception {
        return new SqlSessionTemplate(sqlSessionFactory);
    }
}
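The DynamicDataSource registered above lives in the project's common module (package com.fibo.ddp.common.service.datax.runner.mysql) and is not part of this diff. As a hedged sketch, routing data sources of this kind are commonly built on Spring's AbstractRoutingDataSource with the lookup key held in a ThreadLocal; the class and method names below are hypothetical, not the project's implementation.

import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

public class RoutingDataSourceSketch extends AbstractRoutingDataSource {

    // Key selecting one of the entries passed to setTargetDataSources(); "default" matches the map above
    private static final ThreadLocal<String> CURRENT_KEY = ThreadLocal.withInitial(() -> "default");

    public static void switchTo(String key) {
        CURRENT_KEY.set(key);
    }

    public static void reset() {
        CURRENT_KEY.remove();
    }

    @Override
    protected Object determineCurrentLookupKey() {
        return CURRENT_KEY.get();
    }
}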
@@ -0,0 +1,52 @@
package com.fibo.ddp.runner.api.config;

import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.task.SimpleAsyncTaskExecutor;
import org.springframework.http.client.AsyncClientHttpRequestFactory;
import org.springframework.http.client.ClientHttpRequestFactory;
import org.springframework.http.client.SimpleClientHttpRequestFactory;
import org.springframework.web.client.AsyncRestTemplate;
import org.springframework.web.client.RestTemplate;

/**
 * RestTemplate configuration.
 */
@Configuration
public class RestTemplateConfig {

    @Bean
    public RestTemplate restTemplate(@Qualifier("clientHttpRequestFactory") ClientHttpRequestFactory factory) {
        return new RestTemplate(factory);
    }

    @Bean
    public AsyncRestTemplate asyncRestTemplate(@Qualifier("asyncClientHttpRequestFactory") AsyncClientHttpRequestFactory factory) {
        return new AsyncRestTemplate(factory);
    }

    @Bean
    public ClientHttpRequestFactory clientHttpRequestFactory() {
        // Create a simple client HTTP request factory
        SimpleClientHttpRequestFactory factory = new SimpleClientHttpRequestFactory();
        // Connect timeout
        factory.setConnectTimeout(15000);
        // Read timeout
        factory.setReadTimeout(5000);
        return factory;
    }

    @Bean
    public AsyncClientHttpRequestFactory asyncClientHttpRequestFactory() {
        // Create a simple client HTTP request factory
        SimpleClientHttpRequestFactory factory = new SimpleClientHttpRequestFactory();
        // Connect timeout
        // factory.setConnectTimeout(15000);
        // Read timeout
        // factory.setReadTimeout(5000);
        // Use an async task executor (threads are not reused; each call starts a new thread)
        factory.setTaskExecutor(new SimpleAsyncTaskExecutor());
        return factory;
    }
}
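Not part of the commit: an illustrative caller (hypothetical class) injecting the RestTemplate bean configured above; calls block for at most the 15 s connect / 5 s read timeouts set in clientHttpRequestFactory(). Note that AsyncRestTemplate is deprecated as of Spring 5 in favor of WebClient.

import javax.annotation.Resource;
import org.springframework.stereotype.Service;
import org.springframework.web.client.RestTemplate;

@Service
public class RemoteCallSketch {

    @Resource
    private RestTemplate restTemplate;

    public String fetch(String url) {
        // Simple blocking GET using the timeouts configured in RestTemplateConfig
        return restTemplate.getForObject(url, String.class);
    }
}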
ddp-runner-api/src/main/resources/application-dev.properties (new file, 59 lines)
@@ -0,0 +1,59 @@
server.port=8081

logging.config=classpath:logging-config.xml

# mysql
spring.datasource.druid.url=jdbc:mysql://localhost:3306/riskmanage?serverTimezone=Asia/Shanghai&useUnicode=true&characterEncoding=utf-8&allowMultiQueries=true
spring.datasource.druid.username=root
spring.datasource.druid.password=enginex
spring.datasource.druid.driver-class-name=com.mysql.cj.jdbc.Driver
spring.datasource.druid.initialSize=20
spring.datasource.druid.minIdle=20
spring.datasource.druid.maxActive=100
spring.datasource.druid.maxWait=60000
spring.datasource.druid.timeBetweenEvictionRunsMillis=60000
spring.datasource.druid.minEvictableIdleTimeMillis=300000
spring.datasource.druid.testWhileIdle=true
spring.datasource.druid.testOnBorrow=true
spring.datasource.druid.testOnReturn=false
spring.datasource.druid.poolPreparedStatements=true
spring.datasource.druid.maxOpenPreparedStatements=20
spring.datasource.druid.validationQuery=SELECT 1
spring.datasource.druid.validation-query-timeout=500
spring.datasource.druid.filters=stat

# redis
redis.host=localhost
redis.port=6379
redis.db=1
redis.password=enginex
redis.pool.maxTotal=3000
redis.pool.maxIdle=100
redis.pool.maxWait=1000
redis.pool.timeout=100000

# mail
spring.mail.host=smtp.exmail.qq.com
spring.mail.username=xxx
spring.mail.password=xxx
spring.mail.port=465
spring.mail.properties.mail.smtp.auth=true
spring.mail.properties.mail.smtp.timeout=50000
spring.mail.properties.mail.smtp.starttls.enable=true
spring.mail.properties.mail.smtp.socketFactory.port=465
spring.mail.properties.mail.smtp.socketFactory.class=javax.net.ssl.SSLSocketFactory
spring.mail.properties.mail.smtp.socketFactory.fallback=false

# hbase
spring.data.hbase.quorum: localhost:2181
spring.data.hbase.rootDir: /usr/local/hbase/datatest
spring.data.hbase.nodeParent: /hbase

# canal
switch.use.cache=off
switch.canal.cache=off
canal.hostname=localhost
canal.port=11111

# Monitoring center data storage type: mysql or hbase
monitor.data.storage.type=mysql
ddp-runner-api/src/main/resources/application.properties (new file, 1 line)
@@ -0,0 +1 @@
spring.profiles.active=dev
ddp-runner-api/src/main/resources/logging-config.xml (new file, 98 lines)
@@ -0,0 +1,98 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- The status attribute on configuration controls log4j2's own internal logging; it can be omitted. Setting it to trace shows log4j2's detailed internal output. -->
<!-- monitorInterval: log4j2 can detect changes to this file and reconfigure itself; the value is the check interval in seconds. -->
<configuration monitorInterval="5">
    <!-- Log levels from highest to lowest priority: OFF > FATAL > ERROR > WARN > INFO > DEBUG > TRACE > ALL -->

    <!-- Variables -->
    <Properties>
        <!-- Output pattern: %date is the timestamp, %thread the thread name, %-5level the level padded to 5 characters, %msg the message, %n a newline -->
        <!-- %logger{36}: logger name limited to 36 characters -->
        <property name="LOG_PATTERN" value="%date{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n" />
        <!-- Directory where log files are written -->
        <property name="FILE_PATH" value="logs/runner" />
        <property name="FILE_NAME" value="runner" />
    </Properties>

    <appenders>

        <console name="Console" target="SYSTEM_OUT">
            <!-- Output format -->
            <PatternLayout pattern="${LOG_PATTERN}"/>
            <!-- The console accepts messages at this level and above (onMatch) and rejects the rest (onMismatch) -->
            <ThresholdFilter level="debug" onMatch="ACCEPT" onMismatch="DENY"/>
        </console>

        <!-- This file receives everything and is truncated on every run (append="false"); intended for temporary testing -->
        <File name="Filelog" fileName="${FILE_PATH}/test.log" append="false">
            <PatternLayout pattern="${LOG_PATTERN}"/>
        </File>

        <!-- INFO and above; once the file exceeds the configured size it is archived and compressed under a date-stamped name -->
        <RollingFile name="RollingFileInfo" fileName="${FILE_PATH}/info.log" filePattern="${FILE_PATH}/${FILE_NAME}-INFO-%d{yyyy-MM-dd}_%i.log.gz">
            <!-- Accept messages at this level and above (onMatch), reject the rest (onMismatch) -->
            <ThresholdFilter level="info" onMatch="ACCEPT" onMismatch="DENY"/>
            <PatternLayout pattern="${LOG_PATTERN}"/>
            <Policies>
                <!-- interval controls how often to roll over; the default unit is 1 hour -->
                <TimeBasedTriggeringPolicy interval="1"/>
                <SizeBasedTriggeringPolicy size="10MB"/>
            </Policies>
            <!-- Without DefaultRolloverStrategy, at most 7 files are kept per folder before rollover starts overwriting -->
            <DefaultRolloverStrategy max="15"/>
        </RollingFile>

        <!-- WARN and above; rolled and compressed the same way -->
        <RollingFile name="RollingFileWarn" fileName="${FILE_PATH}/warn.log" filePattern="${FILE_PATH}/${FILE_NAME}-WARN-%d{yyyy-MM-dd}_%i.log.gz">
            <ThresholdFilter level="warn" onMatch="ACCEPT" onMismatch="DENY"/>
            <PatternLayout pattern="${LOG_PATTERN}"/>
            <Policies>
                <TimeBasedTriggeringPolicy interval="1"/>
                <SizeBasedTriggeringPolicy size="10MB"/>
            </Policies>
            <DefaultRolloverStrategy max="15"/>
        </RollingFile>

        <!-- ERROR and above; rolled and compressed the same way -->
        <RollingFile name="RollingFileError" fileName="${FILE_PATH}/error.log" filePattern="${FILE_PATH}/${FILE_NAME}-ERROR-%d{yyyy-MM-dd}_%i.log.gz">
            <ThresholdFilter level="error" onMatch="ACCEPT" onMismatch="DENY"/>
            <PatternLayout pattern="${LOG_PATTERN}"/>
            <Policies>
                <TimeBasedTriggeringPolicy interval="1"/>
                <SizeBasedTriggeringPolicy size="10MB"/>
            </Policies>
            <DefaultRolloverStrategy max="15"/>
        </RollingFile>

    </appenders>

    <!-- Logger nodes override logging for specific packages or classes, e.g. a different level per package -->
    <!-- An appender only takes effect once a logger references it -->
    <loggers>

        <!-- Silence noisy DEBUG output from spring and mybatis -->
        <logger name="org.mybatis" level="info" additivity="false">
            <AppenderRef ref="Console"/>
        </logger>
        <!-- Framework logging -->
        <!-- With additivity="false", a child logger writes only to its own appenders, not to its parent's -->
        <Logger name="org.springframework" level="info" additivity="false">
            <AppenderRef ref="Console"/>
        </Logger>

        <root level="info">
            <appender-ref ref="Console"/>
            <appender-ref ref="Filelog"/>
            <appender-ref ref="RollingFileInfo"/>
            <appender-ref ref="RollingFileWarn"/>
            <appender-ref ref="RollingFileError"/>
        </root>
    </loggers>

</configuration>