Use case: to track how large volumes of data flow through the many processing stages of a legacy project, we generate custom JSON-formatted logs and ship them through Kafka to ES (an ELK-like pipeline implemented in Go, not covered in this post) so that the data flow can be traced and analyzed.
(The code shown here is a cleaned-up version of the basic implementation; it is shared for discussion, and corrections are welcome.)
This functionality is packaged in the common module so that other feature modules can pull it in as a dependency:
<dependency>
<groupId>project</groupId>
<artifactId>project-common</artifactId>
<version>0.0.1-SNAPSHOT</version>
</dependency>
Outline:
1=>Custom CustomLogFilter filter
2=>Custom CustomLevel log level
3=>Custom CustomLogAppender appender
4=>Custom RollingCalendar rollover schedule
5=>Custom CustomLog logging class
6=>Log output in the consuming project
1. Custom CustomLogFilter filter
package com.common.filter;
import org.apache.log4j.spi.Filter;
import org.apache.log4j.spi.LoggingEvent;
/**
* <pre>
* Purpose: custom data-log filter
* </pre>
*/
public class CustomLogFilter extends Filter {
boolean acceptOnMatch = false;
private String levelMin;
private String levelMax;
public String getLevelMin() {
return levelMin;
}
public void setLevelMin(String levelMin) {
this.levelMin = levelMin;
}
public String getLevelMax() {
return levelMax;
}
public void setLevelMax(String levelMax) {
this.levelMax = levelMax;
}
public boolean isAcceptOnMatch() {
return acceptOnMatch;
}
public void setAcceptOnMatch(boolean acceptOnMatch) {
this.acceptOnMatch = acceptOnMatch;
}
@Override
public int decide(LoggingEvent lgEvent) {
int inputLevel = lgEvent.getLevel().toInt();
if (inputLevel >= getLevel(levelMin) && inputLevel <= getLevel(levelMax)) {
// In range: accept outright if acceptOnMatch is set, otherwise stay
// neutral and let any remaining filters decide.
return acceptOnMatch ? Filter.ACCEPT : Filter.NEUTRAL;
}
return Filter.DENY;
}
private int getLevel(String level) {
level = level.toUpperCase();
if (level.equals("CUSTOM")) {
return LevelType.CUSTOM.getType();
}
if (level.equals("OFF")) {
return LevelType.OFF.getType();
}
if (level.equals("FATAL")) {
return LevelType.FATAL.getType();
}
if (level.equals("ERROR")) {
return LevelType.ERROR.getType();
}
if (level.equals("INFO")) {
return LevelType.INFO.getType();
}
if (level.equals("WARN")) {
return LevelType.WARN.getType();
}
if (level.equals("DEBUG")) {
return LevelType.DEBUG.getType();
}
if (level.equals("ALL")) {
return LevelType.ALL.getType();
}
return LevelType.OFF.getType();
}
// Numeric values mirror org.apache.log4j.Level; CUSTOM (25000) sits between INFO and WARN.
private enum LevelType {
OFF(Integer.MAX_VALUE),
FATAL(50000),
ERROR(40000),
WARN(30000),
INFO(20000),
DEBUG(10000),
ALL(Integer.MIN_VALUE),
CUSTOM(25000);
int type;
public int getType() {
return type;
}
private LevelType(int type) {
this.type = type;
}
}
}
2. Custom CustomLevel log level
package com.common.log;
import org.apache.log4j.Level;
import org.apache.log4j.net.SyslogAppender;
/**
* <pre>
* Purpose: define the custom log level
* </pre>
*/
public interface CustomLevel {
/**
* Custom level name and numeric value; 25000 places CUSTOM between INFO (20000) and WARN (30000).
*/
Level CUSTOM = new CustomLog.CustomLogLevel(25000, "CUSTOM", SyslogAppender.LOG_LOCAL0);
}
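With the filter from step 1 and the level above in place, a quick standalone check can confirm that only CUSTOM-level events get through. The sketch below is illustrative only; the demo class and the console appender are my own additions, not part of the project:

package com.common.filter;

import org.apache.log4j.ConsoleAppender;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;
import com.common.log.CustomLevel;

// Illustrative demo: attach CustomLogFilter to a console appender and verify
// that only CUSTOM-level events are written.
public class CustomLogFilterDemo {
    public static void main(String[] args) {
        ConsoleAppender appender = new ConsoleAppender(new PatternLayout("%p %m%n"));
        CustomLogFilter filter = new CustomLogFilter();
        filter.setLevelMin("CUSTOM");
        filter.setLevelMax("CUSTOM");
        appender.addFilter(filter);

        Logger root = Logger.getRootLogger();
        root.addAppender(appender);
        root.info("outside the range, dropped by the filter");
        root.log(CustomLevel.CUSTOM, "inside the range, written to the console");
    }
}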
3. Custom CustomLogAppender appender
package com.common.log;
import org.apache.log4j.FileAppender;
import org.apache.log4j.Priority;
import java.io.File;
import java.io.IOException;
import java.io.Writer;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;
import org.apache.log4j.Layout;
import org.apache.log4j.helpers.CountingQuietWriter;
import org.apache.log4j.helpers.LogLog;
import org.apache.log4j.helpers.OptionConverter;
import org.apache.log4j.spi.LoggingEvent;
/**
* <pre>
* Purpose: custom appender modeled on DailyRollingFileAppender, with size-based rolling added
* </pre>
*/
public class CustomLogAppender extends FileAppender {
@Override
public boolean isAsSevereAsThreshold(Priority priority) {
// Regardless of the configured Threshold, only events logged at the CUSTOM level pass this appender.
return CustomLevel.CUSTOM.equals(priority);
}
static final int TOP_OF_TROUBLE = -1;
static final int TOP_OF_MINUTE = 0;
static final int TOP_OF_HOUR = 1;
static final int HALF_DAY = 2;
static final int TOP_OF_DAY = 3;
static final int TOP_OF_WEEK = 4;
static final int TOP_OF_MONTH = 5;
/**
* The default maximum file size is 10MB.
*/
protected long maxFileSize = 10 * 1024 * 1024;
/**
* There is one backup file by default.
*/
protected int maxBackupIndex = 1;
private String datePattern = "'.'yyyy-MM-dd";
private String scheduledFilename;
/**
* The next time we estimate a rollover should occur.
*/
private long nextCheck = System.currentTimeMillis() - 1;
Date now = new Date();
SimpleDateFormat sdf;
RollingCalendar rc = new RollingCalendar();
int checkPeriod = TOP_OF_TROUBLE;
// The gmtTimeZone is used only in computeCheckPeriod() method.
static final TimeZone gmtTimeZone = TimeZone.getTimeZone("GMT");
/**
* The default constructor does nothing.
*/
public CustomLogAppender() {
}
public CustomLogAppender(Layout layout, String filename,
String datePattern) throws IOException {
super(layout, filename, true);
this.datePattern = datePattern;
activateOptions();
}
public long getMaximumFileSize() {
return maxFileSize;
}
public void setMaximumFileSize(long maxFileSize) {
this.maxFileSize = maxFileSize;
}
public void setMaxFileSize(String value) {
maxFileSize = OptionConverter.toFileSize(value, maxFileSize + 1);
}
/**
* Returns the value of the <b>MaxBackupIndex</b> option.
*/
public int getMaxBackupIndex() {
return maxBackupIndex;
}
public void setMaxBackupIndex(int maxBackups) {
this.maxBackupIndex = maxBackups;
}
/**
* The <b>DatePattern</b> takes a string in the same format as expected by
* {@link SimpleDateFormat}. This options determines the rollover schedule.
*/
public void setDatePattern(String pattern) {
datePattern = pattern;
}
/** Returns the value of the <b>DatePattern</b> option. */
public String getDatePattern() {
return datePattern;
}
public void activateOptions() {
super.activateOptions();
if (datePattern != null && fileName != null) {
now.setTime(System.currentTimeMillis());
sdf = new SimpleDateFormat(datePattern);
int type = computeCheckPeriod();
printPeriodicity(type);
rc.setType(type);
File file = new File(fileName);
scheduledFilename = fileName
+ sdf.format(new Date(file.lastModified()));
} else {
LogLog.error("Either File or DatePattern options are not set for appender ["
+ name + "].");
}
}
void printPeriodicity(int type) {
switch (type) {
case TOP_OF_MINUTE:
LogLog.debug("Appender [" + name + "] to be rolled every minute.");
break;
case TOP_OF_HOUR:
LogLog.debug("Appender [" + name
+ "] to be rolled on top of every hour.");
break;
case HALF_DAY:
LogLog.debug("Appender [" + name
+ "] to be rolled at midday and midnight.");
break;
case TOP_OF_DAY:
LogLog.debug("Appender [" + name + "] to be rolled at midnight.");
break;
case TOP_OF_WEEK:
LogLog.debug("Appender [" + name
+ "] to be rolled at start of week.");
break;
case TOP_OF_MONTH:
LogLog.debug("Appender [" + name
+ "] to be rolled at start of every month.");
break;
default:
LogLog.warn("Unknown periodicity for appender [" + name + "].");
}
}
int computeCheckPeriod() {
RollingCalendar rollingCalendar = new RollingCalendar(gmtTimeZone,
Locale.ENGLISH);
// set date to 1970-01-01 00:00:00 GMT
Date epoch = new Date(0);
if (datePattern != null) {
for (int i = TOP_OF_MINUTE; i <= TOP_OF_MONTH; i++) {
SimpleDateFormat simpleDateFormat = new SimpleDateFormat(
datePattern);
simpleDateFormat.setTimeZone(gmtTimeZone); // do all date
// formatting in GMT
String r0 = simpleDateFormat.format(epoch);
rollingCalendar.setType(i);
Date next = new Date(rollingCalendar.getNextCheckMillis(epoch));
String r1 = simpleDateFormat.format(next);
// System.out.println("Type = "+i+", r0 = "+r0+", r1 = "+r1);
if (r0 != null && r1 != null && !r0.equals(r1)) {
return i;
}
}
}
return TOP_OF_TROUBLE; // Deliberately head for trouble...
}
// synchronization not necessary since doAppend is already synched
public void sizeRollOver() {
File target;
File file;
LogLog.debug("rolling over count="
+ ((CountingQuietWriter) qw).getCount());
LogLog.debug("maxBackupIndex=" + maxBackupIndex);
String datedFilename = fileName + sdf.format(now);
if (maxBackupIndex > 0) {
// Delete the oldest file, to keep Windows happy.
file = new File(datedFilename + '.' + maxBackupIndex);
if (file.exists())
file.delete();
// Map {(maxBackupIndex - 1), ..., 2, 1} to {maxBackupIndex, ..., 3,
// 2}
for (int i = maxBackupIndex - 1; i >= 1; i--) {
file = new File(datedFilename + "." + i);
if (file.exists()) {
target = new File(datedFilename + '.' + (i + 1));
LogLog.debug("Renaming file " + file + " to " + target);
file.renameTo(target);
}
}
// Rename fileName to datedFilename.1
target = new File(datedFilename + "." + 1);
this.closeFile(); // keep windows happy.
file = new File(fileName);
LogLog.debug("Renaming file " + file + " to " + target);
file.renameTo(target);
} else if (maxBackupIndex < 0) { // infinite number of backup files
// find the next unused backup index
for (int i = 1; i < Integer.MAX_VALUE; i++) {
target = new File(datedFilename + "." + i);
if (! target.exists()) {//Rename fileName to datedFilename.i
this.closeFile();
file = new File(fileName);
file.renameTo(target);
LogLog.debug("Renaming file " + file + " to " + target);
break;
}
}
}
try {
// This will also close the file. This is OK since multiple
// close operations are safe.
this.setFile(fileName, false, bufferedIO, bufferSize);
} catch (IOException e) {
LogLog.error("setFile(" + fileName + ", false) call failed.", e);
}
scheduledFilename = datedFilename;
}
public synchronized void setFile(String fileName, boolean append,
boolean bufferedIO, int bufferSize) throws IOException {
super.setFile(fileName, append, this.bufferedIO, this.bufferSize);
if (append) {
File f = new File(fileName);
((CountingQuietWriter) qw).setCount(f.length());
}
}
protected void setQWForFiles(Writer writer) {
this.qw = new CountingQuietWriter(writer, errorHandler);
}
/**
* Rollover the current file to a new file.
*/
void timeRollOver() throws IOException {
/* Compute filename, but only if datePattern is specified */
if (datePattern == null) {
errorHandler.error("Missing DatePattern option in rollOver().");
return;
}
String datedFilename = fileName + sdf.format(now);
// It is too early to roll over because we are still within the
// bounds of the current interval. Rollover will occur once the
// next interval is reached.
if (scheduledFilename.equals(datedFilename)) {
return;
}
// close current file, and rename it to datedFilename
this.closeFile();
File target = new File(scheduledFilename);
if (target.exists()) {
target.delete();
}
File file = new File(fileName);
boolean result = file.renameTo(target);
if (result) {
LogLog.debug(fileName + " -> " + scheduledFilename);
} else {
LogLog.error("Failed to rename [" + fileName + "] to ["
+ scheduledFilename + "].");
}
try {
// This will also close the file. This is OK since multiple
// close operations are safe.
super.setFile(fileName, false, this.bufferedIO, this.bufferSize);
} catch (IOException e) {
errorHandler.error("setFile(" + fileName + ", false) call failed.");
}
scheduledFilename = datedFilename;
}
protected void subAppend(LoggingEvent event) {
long n = System.currentTimeMillis();
if (n >= nextCheck) {
now.setTime(n);
nextCheck = rc.getNextCheckMillis(now);
try {
timeRollOver();
} catch (IOException ioe) {
LogLog.error("rollOver() failed.", ioe);
}
} else if ((fileName != null)
&& ((CountingQuietWriter) qw).getCount() >= maxFileSize) {
sizeRollOver();
}
super.subAppend(event);
}
}
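For reference, the appender can also be constructed in code rather than through log4j.properties. The following is only a sketch of one possible wiring; the demo class, file path, and logger name are my own illustrative choices:

package com.common.log;

import java.io.IOException;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;

// Illustrative wiring: build the custom appender directly and log one CUSTOM event.
public class CustomLogAppenderDemo {
    public static void main(String[] args) throws IOException {
        CustomLogAppender appender = new CustomLogAppender(
                new PatternLayout("%m%n"),      // write the JSON message only
                "/tmp/custom.log",              // illustrative path
                "'-'yyyy-MM-dd'.log'");         // same DatePattern as in section 6.1
        appender.setMaxFileSize("500MB");       // size-based rolling on top of the daily rolling

        Logger logger = Logger.getLogger("dataTraceDemo");
        logger.addAppender(appender);
        logger.log(CustomLevel.CUSTOM, "{\"step\":\"demo\",\"status\":\"OK\"}");
    }
}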
4. Custom RollingCalendar rollover schedule
package com.common.log;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Locale;
import java.util.TimeZone;
/**
* <pre>
* Purpose: custom RollingCalendar used to compute log rollover boundaries
* </pre>
*/
class RollingCalendar extends GregorianCalendar {
private static final long serialVersionUID = 1L;
int type = CustomLogAppender.TOP_OF_TROUBLE;
RollingCalendar() {
super();
}
RollingCalendar(TimeZone tz, Locale locale) {
super(tz, locale);
}
void setType(int type) {
this.type = type;
}
public long getNextCheckMillis(Date now) {
return getNextCheckDate(now).getTime();
}
public Date getNextCheckDate(Date now) {
this.setTime(now);
switch (type) {
case CustomLogAppender.TOP_OF_MINUTE:
this.set(Calendar.SECOND, 0);
this.set(Calendar.MILLISECOND, 0);
this.add(Calendar.MINUTE, 1);
break;
case CustomLogAppender.TOP_OF_HOUR:
this.set(Calendar.MINUTE, 0);
this.set(Calendar.SECOND, 0);
this.set(Calendar.MILLISECOND, 0);
this.add(Calendar.HOUR_OF_DAY, 1);
break;
case CustomLogAppender.HALF_DAY:
this.set(Calendar.MINUTE, 0);
this.set(Calendar.SECOND, 0);
this.set(Calendar.MILLISECOND, 0);
int hour = get(Calendar.HOUR_OF_DAY);
if (hour < 12) {
this.set(Calendar.HOUR_OF_DAY, 12);
} else {
this.set(Calendar.HOUR_OF_DAY, 0);
this.add(Calendar.DAY_OF_MONTH, 1);
}
break;
case CustomLogAppender.TOP_OF_DAY:
this.set(Calendar.HOUR_OF_DAY, 0);
this.set(Calendar.MINUTE, 0);
this.set(Calendar.SECOND, 0);
this.set(Calendar.MILLISECOND, 0);
this.add(Calendar.DATE, 1);
break;
case CustomLogAppender.TOP_OF_WEEK:
this.set(Calendar.DAY_OF_WEEK, getFirstDayOfWeek());
this.set(Calendar.HOUR_OF_DAY, 0);
this.set(Calendar.SECOND, 0);
this.set(Calendar.MILLISECOND, 0);
this.add(Calendar.WEEK_OF_YEAR, 1);
break;
case CustomLogAppender.TOP_OF_MONTH:
this.set(Calendar.DATE, 1);
this.set(Calendar.HOUR_OF_DAY, 0);
this.set(Calendar.SECOND, 0);
this.set(Calendar.MILLISECOND, 0);
this.add(Calendar.MONTH, 1);
break;
default:
throw new IllegalStateException("Unknown periodicity type.");
}
return getTime();
}
}
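A quick way to sanity-check the rollover computation is to print the next boundary for a given period type. Because RollingCalendar is package-private, this illustrative sketch has to live in the com.common.log package; the demo class is my own addition:

package com.common.log;

import java.util.Date;

// Illustrative check: print the next daily and hourly rollover boundaries from now.
public class RollingCalendarDemo {
    public static void main(String[] args) {
        RollingCalendar rc = new RollingCalendar();
        Date now = new Date();

        rc.setType(CustomLogAppender.TOP_OF_DAY);
        System.out.println("next daily rollover : " + rc.getNextCheckDate(now));

        rc.setType(CustomLogAppender.TOP_OF_HOUR);
        System.out.println("next hourly rollover: " + rc.getNextCheckDate(now));
    }
}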
5. Custom CustomLog logging class
package com.common.log;
import com.alibaba.fastjson.JSONObject;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import java.util.*;
/**
* <pre>
* Purpose: custom CustomLog logging class
* </pre>
*/
public class CustomLog {
private static Logger logger = Logger.getLogger(CustomLog.class);
private static class Inner {
private static final CustomLog INSTANCE = new CustomLog();
}
private CustomLog() {
}
/**
* Returns the CustomLog singleton.
* @return CustomLog
*/
public static CustomLog getInstance() {
return Inner.INSTANCE;
}
/**
* Level subclass used to instantiate the custom CUSTOM level
*/
public static class CustomLogLevel extends Level{
private static final long serialVersionUID = 1L;
public CustomLogLevel(int level, String levelStr, int syslogEquivalent) {
super(level, levelStr, syslogEquivalent);
}
}
/**
* Writes objLogInfo at the CUSTOM level through the supplied logger.
*
* @param logger
* @param objLogInfo
*/
public static void CUSTOM(Logger logger,Object objLogInfo){
logger.log( CustomLevel.CUSTOM, objLogInfo);
}
/**
* Writes objLogInfo at the CUSTOM level through this class's own logger.
* @param objLogInfo
*/
public static void CUSTOM(Object objLogInfo){
logger.log( CustomLevel.CUSTOM, objLogInfo);
}
/**
* Data-flow tracking entry for a single record, serialized to JSON.
* Add further variants of this method to match your own tracking needs.
*/
public static void customLogOut(Map<String, Object> map){
CUSTOM(JSONObject.toJSONString(map));
}
}
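For the data-flow tracking scenario from the introduction, the map passed to customLogOut would typically carry whatever identifiers the downstream Kafka/ES analysis needs. The field names below are purely illustrative and not part of the project:

package com.common.log;

import java.util.HashMap;
import java.util.Map;

// Illustrative usage: emit one JSON trace entry per processing step of a record.
public class DataTraceExample {
    public static void main(String[] args) {
        Map<String, Object> trace = new HashMap<>();
        trace.put("traceId", "9f2c1a");              // illustrative record id
        trace.put("step", "dedup");                  // pipeline stage name
        trace.put("status", "OK");
        trace.put("ts", System.currentTimeMillis());
        CustomLog.customLogOut(trace);               // writes {"traceId":"9f2c1a",...} at the CUSTOM level
    }
}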
6. Log output in the consuming project
6.1==>log4j.properties configuration
# Custom data-log output (customLog)
# Use the custom CustomLogAppender
log4j.appender.CUSTOM=com.common.log.CustomLogAppender
# Log file path
log4j.appender.CUSTOM.File=/data/logs/log/custom.log
# Append to the existing log file
log4j.appender.CUSTOM.Append=true
# Maximum size of a single log file
log4j.appender.CUSTOM.MaxFileSize=500MB
# Appender threshold (the overridden isAsSevereAsThreshold only lets CUSTOM events through)
log4j.appender.CUSTOM.Threshold=CUSTOM
# File naming: rolled by date; once a file exceeds 500MB it is additionally rolled to <name>.1, <name>.2, ...
log4j.appender.CUSTOM.DatePattern='-'yyyy-MM-dd'.log'
# Log layout
log4j.appender.CUSTOM.layout=org.apache.log4j.PatternLayout
# CustomLogFilter level range
com.common.filter.CustomLogFilter.levelMin=CUSTOM
com.common.filter.CustomLogFilter.levelMax=CUSTOM
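Note that the snippet above only defines the CUSTOM appender; it is not attached to any logger, and log4j's PropertyConfigurator will not pick up the class-qualified keys in the last two lines. One possible way to wire both up is sketched below; this is my own assumption (the filter.1 syntax needs a log4j 1.2 release whose PropertyConfigurator parses appender filters), so adapt it to your project:

# (Sketch) emit each JSON map as a bare line; this is also PatternLayout's default
log4j.appender.CUSTOM.layout.ConversionPattern=%m%n
# (Sketch) attach the appender to the logger used by CustomLog so CUSTOM events reach it
log4j.logger.com.common.log.CustomLog=DEBUG,CUSTOM
log4j.additivity.com.common.log.CustomLog=false
# (Sketch) appender-scoped filter configuration
log4j.appender.CUSTOM.filter.1=com.common.filter.CustomLogFilter
log4j.appender.CUSTOM.filter.1.levelMin=CUSTOM
log4j.appender.CUSTOM.filter.1.levelMax=CUSTOM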
6.2==>Unit test
package com.common;
import com.common.log.CustomLog;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.HashMap;
@RunWith(SpringRunner.class)
@SpringBootTest
public class CommonApplicationTests {
private static CustomLog customLog = CustomLog.getInstance();
@Test
public void test() {
HashMap<String, Object> map = new HashMap<>();
map.put("name", "CUSTOM");
customLog.customLogOut(map);
}
}
// Result
{"name":"CUSTOM"}
This post is meant as a reference for custom log output with log4j; for the full requirement, the resulting logs can be fed into an ELK-style pipeline for log analysis.