32 changed files with 656 additions and 948 deletions
@@ -1,178 +0,0 @@
package org.nl.modules.lucene.common;

import com.alibaba.fastjson.JSONObject;
import org.apache.commons.io.FileUtils;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.springframework.beans.factory.annotation.Value;
import org.wltea.analyzer.lucene.IKAnalyzer;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.Set;

/**
 * Lucene indexer
 */
public class Indexer {
    /**
     * The IndexWriter instance that writes the index
     */
    private IndexWriter writer;

    public IndexWriter getWriter() {
        return writer;
    }

    /**
     * Constructor: instantiates the IndexWriter
     * @param indexDir
     * @throws Exception
     */
    public Indexer(String indexDir) throws Exception {
        Directory dir = FSDirectory.open(Paths.get(indexDir));
        // The standard analyzer would drop whitespace and stop words such as "is", "a", "the"
        // Analyzer analyzer = new StandardAnalyzer();
        Analyzer analyzer = new IKAnalyzer();
        // Hand the analyzer to the index writer configuration
        IndexWriterConfig config = new IndexWriterConfig(analyzer);
        // Instantiate the index writer
        writer = new IndexWriter(dir, config);
    }

    /**
     * Indexes every file under the given directory
     * @param dataDir
     * @return
     * @throws Exception
     */
    public int indexAll(String dataDir) throws Exception {
        // List every file under the path
        File[] files = new File(dataDir).listFiles();
        if (null != files) {
            for (File file : files) {
                // Index each file via indexFile below
                indexFile(file);
            }
        }
        // Return the number of indexed documents
        // return writer.numDocs();
        return writer.numRamDocs();
    }

    /**
     * Indexes a single file
     * @param file
     * @throws Exception
     */
    private void indexFile(File file) throws Exception {
        System.out.println("Indexing file: " + file.getCanonicalPath());
        // Build the document for this file via getDocument below
        Document doc = getDocument(file);
        // Add the document to the index
        //Document doc = json2Doc(jsonDoc);
        // Document doc = new Document();
        // doc.add(new TextField("content", jsonDoc, Field.Store.YES));
        Field fieldContent = new TextField("fieldContent", FileUtils.readFileToString(file, "UTF-8"), Field.Store.YES);

        // Add doc to the index
        writer.addDocument(doc);
    }

    /**
     * Builds the document; each field on it is analogous to a column of a database row
     * @param file
     * @return
     * @throws Exception
     */
    private Document getDocument(File file) throws Exception {
        Document doc = new Document();
        // Add the fields
        // File content
        doc.add(new TextField("contents", new FileReader(file)));
        // File name, stored in the index
        doc.add(new TextField("fileName", file.getName(), Field.Store.YES));
        // File path
        doc.add(new TextField("fullPath", file.getCanonicalPath(), Field.Store.YES));
        return doc;
    }

    public Document json2Doc(String strDoc) {
        Document doc = new Document();
        JSONObject jsonDoc = JSONObject.parseObject(strDoc);
        Set<String> keys = jsonDoc.keySet();
        for (String key : keys) {
            doc.add(new TextField(key, jsonDoc.getString(key), Field.Store.YES));
        }
        return doc;
    }

    public void addLogIndex(String msg) throws IOException {
        // Step 1: create a Directory pointing at the index location (RAMDirectory would keep it in memory)
        Directory directory = FSDirectory.open(new File("D:\\lucene\\index").toPath());
        // Step 2: create an IndexWriter for writing the index
        // Analyzer analyzer = new StandardAnalyzer();
        IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(new IKAnalyzer(false)));
        // indexWriter.deleteAll(); // wipe the whole index
        // IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(new StandardAnalyzer()));
        // Record the indexing start time
        long startTime = System.currentTimeMillis();
        // Step 3: create a document for the message
        Document document = new Document();
        // document.add(new TextField("fieldContent", device_id, Field.Store.YES));
        document.add(new TextField("fieldContent", msg, Field.Store.YES));
        indexWriter.addDocument(document);
        // Record the indexing end time
        long endTime = System.currentTimeMillis();
        System.out.println("Indexing took " + (endTime - startTime) + " ms");
        indexWriter.commit();
        // Step 8: release resources
        indexWriter.close();
        System.out.println("Index built successfully ----- resources closed");
    }

    // Path of the system log files
    @Value("${logging.file.path}")
    private String logUrl;

    public static void main(String[] args) throws IOException {
        // Step 1: create a Directory pointing at the index location (RAMDirectory would keep it in memory)
        Directory directory = FSDirectory.open(new File("D:\\lucene\\index").toPath());
        // Step 2: create an IndexWriter for writing the index
        // Analyzer analyzer = new StandardAnalyzer();
        IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(new IKAnalyzer(false)));

        indexWriter.deleteAll(); // wipe the whole index
        // IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(new StandardAnalyzer()));
        // Record the indexing start time
        long startTime = System.currentTimeMillis();
        // Step 3: read files from disk and create one document per line
        File file = new File("D:\\testlog");
        // Step 4: list the files
        File[] files = file.listFiles();
        for (File item : files) {
            BufferedReader bufferedReader = new BufferedReader(new FileReader(item));
            String strLine = null;
            while (null != (strLine = bufferedReader.readLine())) {
                Document document = new Document();
                // document.add(new Field());
                document.add(new TextField("fieldContent", strLine, Field.Store.YES));
                indexWriter.addDocument(document);
            }
        }
        // Record the indexing end time
        long endTime = System.currentTimeMillis();
        System.out.println("Indexing took " + (endTime - startTime) + " ms");
        indexWriter.commit();
        // Step 8: release resources
        indexWriter.close();
        System.out.println("Index built successfully ----- resources closed");
    }
}
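As a quick orientation for the class removed above, here is a minimal usage sketch (the paths are illustrative, the IK analyzer jar is assumed to be on the classpath, and the example itself is not part of the change set):

    // Build an index over a directory of plain-text files, then release the writer.
    public class IndexerUsageExample {
        public static void main(String[] args) throws Exception {
            Indexer indexer = new Indexer("D:\\lucene\\index"); // where the index is written
            int count = indexer.indexAll("D:\\testlog");        // one document per file in the directory
            System.out.println("Indexed documents: " + count);
            indexer.getWriter().close();                        // commits pending documents and closes the writer
        }
    }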
@@ -1,123 +0,0 @@
package org.nl.modules.lucene.common;

/**
 * @author ldjun
 * @version 1.0
 * @date 2023-08-24 13:00
 * @desc desc
 */

import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.AppenderBase;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.util.ObjectUtil;
import com.alibaba.fastjson.JSONObject;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.*;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.nl.acs.opc.OpcItemDto;
import org.nl.modules.lucene.enums.LogTypeEnum;
import org.nl.modules.lucene.service.LuceneExecuteLogService;
import org.nl.modules.lucene.service.dto.LuceneLogDto;
import org.nl.modules.wql.util.SpringContextHolder;
import org.wltea.analyzer.lucene.IKAnalyzer;

import java.io.IOException;
import java.nio.file.Paths;
import java.util.Map;

public class LuceneAppender extends AppenderBase<ILoggingEvent> {

    private Directory index;
    private IndexWriter indexWriter;

    @Override
    public void start() {
        super.start();
        try {
            index = FSDirectory.open(Paths.get(LogMessageConstant.INDEX_DIR));
        } catch (IOException e) {
            e.printStackTrace();
        }

        // Initialize the Lucene index
        Analyzer analyzer = new IKAnalyzer();
        IndexWriterConfig config = new IndexWriterConfig(analyzer);
        try {
            indexWriter = new IndexWriter(index, config);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    protected void append(ILoggingEvent event) {
        String message = event.getFormattedMessage();
        try {
            LuceneLogDto luceneLogDto = JSONObject.parseObject(message, LuceneLogDto.class);

            // LuceneLogDto luceneLogDto = new LuceneLogDto(itemDto.getOpc_server_code(), itemDto.getOpc_plc_code(), itemDto.getDevice_code(), itemDto.getItem_code().substring(itemDto.getItem_code().lastIndexOf(".") + 1),
            //         String.valueOf(itemDto.getHis_item_value()), String.valueOf(itemDto.getItem_value()));
            // luceneLogDto.setLogType(LogTypeEnum.DEVICE_LOG.getDesc());
            //IndexWriter indexWriter = LuceneIndexWriter.getIndexWriter();
            // Create a Document
            Document document = new Document();
            try {
                // Record the indexing start time
                long startTime = System.currentTimeMillis();
                // Add the fields to the document
                if (ObjectUtil.isNotEmpty(luceneLogDto.getDevice_code())) {
                    document.add(new StringField("device_code", luceneLogDto.getDevice_code(), Field.Store.YES));
                    // document.add(new TextField("device_code", luceneLogDto.getDevice_code(), Field.Store.YES));
                }
                if (ObjectUtil.isNotEmpty(luceneLogDto.getContent())) {
                    document.add(new StringField("fieldContent", luceneLogDto.getContent(), Field.Store.YES));
                }
                if (ObjectUtil.isNotEmpty(luceneLogDto.getMethod())) {
                    document.add(new StringField("method", luceneLogDto.getMethod(), Field.Store.YES));
                }
                if (ObjectUtil.isNotEmpty(luceneLogDto.getStatus_code())) {
                    document.add(new StringField("status_code", luceneLogDto.getStatus_code(), Field.Store.YES));
                }
                if (ObjectUtil.isNotEmpty(luceneLogDto.getRequestparam())) {
                    document.add(new StringField("requestparam", luceneLogDto.getRequestparam(), Field.Store.YES));
                }
                if (ObjectUtil.isNotEmpty(luceneLogDto.getResponseparam())) {
                    document.add(new StringField("responseparam", luceneLogDto.getResponseparam(), Field.Store.YES));
                }
                document.add(new StringField("logType", luceneLogDto.getLogType(), Field.Store.YES));
                document.add(new StringField("logTime", DateUtil.format(new DateTime(), "yyyy-MM-dd HH:mm:ss.SSS"), Field.Store.YES));
                document.add(new NumericDocValuesField("logTime", System.currentTimeMillis())); // used for sorting
                // Record the indexing end time
                long endTime = System.currentTimeMillis();
                // log.info("Indexing took {} ms", endTime - startTime);

                try {
                    indexWriter.addDocument(document);
                    indexWriter.commit();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            } catch (Exception e) {
                return;
            }
        } catch (Exception e) {
            return;
        }
    }

    @Override
    public void stop() {
        super.stop();
        try {
            indexWriter.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
@@ -1,65 +0,0 @@
package org.nl.modules.lucene.common;

import cn.hutool.core.date.DateUtil;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.nl.modules.lucene.config.UrlConfig;
import org.wltea.analyzer.lucene.IKAnalyzer;

import java.io.File;
import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

public class LuceneIndexWriter {
//    private static IndexWriter indexWriter;
//
//    static {
//        try {
//            Directory directory = FSDirectory.open(new File(UrlConfig.luceneUrl).toPath());
//            IndexWriterConfig config = new IndexWriterConfig(new IKAnalyzer());
//            indexWriter = new IndexWriter(directory, config);
//        } catch (Exception e) {
//            e.printStackTrace();
//        }
//        /** When the current thread ends, close the IndexWriter automatically via a Runtime shutdown hook */
//        Runtime.getRuntime().addShutdownHook(new Thread(){
//            @Override
//            public void run() {
//                try {
//                    closeIndexWriter();
//                } catch (Exception e) {
//                    e.printStackTrace();
//                }
//            }
//        });
//    }
//    /** Automatically close the IndexWriter when the thread ends */
//    public static IndexWriter getIndexWriter() {
//        return indexWriter;
//    }
//    /** Close the IndexWriter
//     * @throws IOException
//     * @throws CorruptIndexException */
//    public static void closeIndexWriter() throws Exception {
//        if (indexWriter != null) {
//            indexWriter.close();
//        }
//    }
//
//    public static void main(String[] args) throws IOException {
//        indexWriter.deleteAll();
//    }
//
    public static String getDate(String timeString) throws ParseException {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSX"); // input time format
        Date date = sdf.parse(timeString);
        timeString = DateUtil.format(date, "yyyy-MM-dd HH:mm:ss.SSS"); // reformatted time
        return timeString;
    }

}
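The only method still in use in the class above is getDate, which normalizes an ISO-8601 timestamp into the "yyyy-MM-dd HH:mm:ss.SSS" form that Searcher feeds into its logTime range query. A small illustrative call (the input value is made up; the shown output assumes the JVM default time zone is UTC+8):

    // Parses "yyyy-MM-dd'T'HH:mm:ss.SSSX" input and reformats it; throws ParseException on malformed input.
    String normalized = LuceneIndexWriter.getDate("2023-08-24T13:00:00.000+08"); // -> "2023-08-24 13:00:00.000"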
@@ -1,144 +0,0 @@
package org.nl.modules.lucene.common;

import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.BytesRef;
import org.wltea.analyzer.lucene.IKAnalyzer;

import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.Map;

/**
 * Lucene searcher
 */
@Slf4j
public class Searcher {

    public static Map<String, Object> search(String indexDir, String ext, Map whereJson) throws Exception {
        // Open the directory where the index lives
        Directory dir = FSDirectory.open(Paths.get(indexDir));
        IndexReader reader = DirectoryReader.open(dir);
        // Build the IndexSearcher
        IndexSearcher searcher = new IndexSearcher(reader);
        // IK analyzer (a standard analyzer would drop whitespace and stop words such as "is", "a", "the")
        Analyzer analyzer = new IKAnalyzer(true);
        // Analyzer analyzer = new StandardAnalyzer();
        // Analyzer analyzer = new IKAnalyzer(false);
        // Query parser
        // QueryParser queryParser = new QueryParser("fieldContent", analyzer);

        // Record the search start time
        long startTime = System.currentTimeMillis();
        // Lucene itself does not paginate, so pagination is done here; prepare the paging parameters:
        int pageSize = Integer.parseInt(whereJson.get("size").toString()); // page size
        int pageNum = Integer.parseInt(whereJson.get("page").toString()); // current page number
        int start = pageNum * pageSize; // first hit of the current page
        int end = start + pageSize; // end of the current page (exclusive)
        // Sort definition: SortField takes the field name, the field type, and whether to reverse (false = ascending, true = descending)
        Sort sort = new Sort(new SortField("logTime", SortField.Type.LONG, true));

        TopDocs docs = null;
        BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder();
        // Time-range filter
        String startDate = (String) whereJson.get("begin_time");
        String endDate = (String) whereJson.get("end_time");
        Calendar calendar = Calendar.getInstance();
        calendar.set(1970, 0, 1);
        if (startDate == null) {
            startDate = DateUtil.format(calendar.getTime(), "yyyy-MM-dd HH:mm:ss.SSS");
        } else {
            startDate = LuceneIndexWriter.getDate(startDate);
        }
        if (endDate == null) {
            endDate = DateUtil.format(new DateTime(), "yyyy-MM-dd HH:mm:ss.SSS");
        } else {
            endDate = LuceneIndexWriter.getDate(endDate);
        }
        TermRangeQuery termRangeQuery = new TermRangeQuery("logTime", new BytesRef(startDate), new BytesRef(endDate), true, true);
        booleanQueryBuilder.add(termRangeQuery, BooleanClause.Occur.MUST);
        if (whereJson.get("device_code") != null) {
            Query termQuery = new TermQuery(new Term("device_code", (String) whereJson.get("device_code")));
            booleanQueryBuilder.add(termQuery, BooleanClause.Occur.MUST);
        }
        if (whereJson.get("method") != null) {
            Query termQuery = new TermQuery(new Term("method", (String) whereJson.get("method")));
            booleanQueryBuilder.add(termQuery, BooleanClause.Occur.MUST);
        }
        if (whereJson.get("status_code") != null) {
            Query termQuery = new TermQuery(new Term("status_code", (String) whereJson.get("status_code")));
            booleanQueryBuilder.add(termQuery, BooleanClause.Occur.MUST);
        }
        if (whereJson.get("requestparam") != null) {
            WildcardQuery query = new WildcardQuery(new Term("requestparam", "*" + (String) whereJson.get("requestparam") + "*"));
            booleanQueryBuilder.add(query, BooleanClause.Occur.MUST);
        }
        if (whereJson.get("responseparam") != null) {
            WildcardQuery query = new WildcardQuery(new Term("responseparam", "*" + (String) whereJson.get("responseparam") + "*"));
            booleanQueryBuilder.add(query, BooleanClause.Occur.MUST);
        }
        if (whereJson.get("blurry") != null) {
            WildcardQuery query = new WildcardQuery(new Term("fieldContent", "*" + (String) whereJson.get("blurry") + "*"));
            booleanQueryBuilder.add(query, BooleanClause.Occur.MUST);
        }
        docs = searcher.search(booleanQueryBuilder.build(), end, sort);
        // Record the search end time
        long endTime = System.currentTimeMillis();
        log.info("Matching {} took {} ms", booleanQueryBuilder.build(), (endTime - startTime));
        log.info("Found {} log entries", docs.totalHits.value);
        List<String> list = new ArrayList<>();
        ScoreDoc[] scoreDocs = docs.scoreDocs;
        if (end > docs.totalHits.value) end = (int) docs.totalHits.value;
        JSONArray array = new JSONArray();

        for (int i = start; i < end; i++) {
            ScoreDoc scoreDoc = scoreDocs[i];
            Document doc = reader.document(scoreDoc.doc);
            JSONObject object = new JSONObject();
            object.put("content", doc.get("fieldContent"));
            object.put("device_code", doc.get("device_code"));
            object.put("logTime", doc.get("logTime"));
            object.put("method", doc.get("method"));
            object.put("status_code", doc.get("status_code"));
            object.put("requestparam", doc.get("requestparam"));
            object.put("responseparam", doc.get("responseparam"));
            if (doc.get("fieldContent") != null) {
                array.add(object);
            }
        }
        for (Object logDto : array) {
            log.info(logDto.toString());
        }
        reader.close();
        JSONObject jo = new JSONObject();
        jo.put("content", array);
        jo.put("totalElements", docs.totalHits.value);
        return jo;
    }

    public static void main(String[] args) {
        String indexDir = "D:\\lucene\\index";
        // Query string
        String q = "07.832";
        Map whereJson = null;
        try {
            search(indexDir, q, whereJson);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
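A hedged call sketch for the Searcher above (all values are illustrative; "page" and "size" are mandatory because they are parsed unconditionally, while the remaining keys are optional filters):

    import java.util.HashMap;
    import java.util.Map;

    // Query the log index: first page, 10 hits per page, exact device filter plus a fuzzy content match.
    public class SearcherUsageExample {
        public static void main(String[] args) throws Exception {
            Map<String, Object> whereJson = new HashMap<>();
            whereJson.put("page", 0);
            whereJson.put("size", 10);
            whereJson.put("device_code", "AGV01");                     // exact match on the stored StringField
            whereJson.put("blurry", "error");                          // wildcard match against fieldContent
            whereJson.put("begin_time", "2023-08-24T00:00:00.000+08"); // normalized by LuceneIndexWriter.getDate
            Map<String, Object> result = Searcher.search("D:\\lucene\\index", null, whereJson);
            System.out.println(result.get("totalElements"));           // total number of hits
            System.out.println(result.get("content"));                 // JSONArray of matching log documents
        }
    }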
@@ -0,0 +1,43 @@
package org.nl.modules.lucene.config;

/**
 * @author ldjun
 * @version 1.0
 * @date 2023-08-24 13:00
 * @desc desc
 */

import ch.qos.logback.classic.spi.ILoggingEvent;
import cn.hutool.core.util.IdUtil;
import com.yomahub.tlog.core.context.AspectLogContext;
import com.yomahub.tlog.core.enhance.logback.async.AspectLogbackAsyncAppender;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.MDC;

import java.util.Map;

public class AsyncLuceneAppender extends AspectLogbackAsyncAppender {

    @Override
    protected void append(ILoggingEvent event) {
        String traceId = AspectLogContext.getLogValue();
        if (StringUtils.isEmpty(traceId)) {
            traceId = IdUtil.nanoId() + "@";
            AspectLogContext.putLogValue(traceId);
        } else {
            if (!traceId.contains("@")) {
                AspectLogContext.putLogValue(traceId + "@");
            }
        }
        if (StringUtils.isNotEmpty(traceId)) {
            MDC.put("traceId", traceId);
            Map<String, String> mdcPropertyMap = event.getMDCPropertyMap();
            if (mdcPropertyMap.getClass().getName().contains("SynchronizedMap")) {
                mdcPropertyMap.put("traceId", traceId);
            }
            MDC.clear();
        }
        super.append(event);
    }

}
@@ -0,0 +1,133 @@
package org.nl.modules.lucene.config;

/**
 * @author ldjun
 * @version 1.0
 * @date 2023-08-24 13:00
 * @desc desc
 */

import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.AppenderBase;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.util.ObjectUtil;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.ttl.TransmittableThreadLocal;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.nl.modules.lucene.service.dto.LuceneLogDto;
import org.springframework.beans.factory.config.YamlPropertiesFactoryBean;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
import org.wltea.analyzer.lucene.IKAnalyzer;

import java.io.IOException;
import java.nio.file.Paths;
import java.util.List;
import java.util.Map;
import java.util.Properties;

public class LuceneAppender extends AppenderBase<ILoggingEvent> {

    public static final TransmittableThreadLocal<String> traceIdTL = new TransmittableThreadLocal();
    public LuceneProperties properties;
    public static Directory index;
    private List<LucenePropertyAndEncoder> encoders;
    public static IndexWriter indexWriter;

    @Override
    public void start() {
        super.start();
        try {
            init();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static void init() throws IOException {
        Resource resource = new ClassPathResource("config/application.yml");
        YamlPropertiesFactoryBean yamlPropertiesFactoryBean = new YamlPropertiesFactoryBean();
        yamlPropertiesFactoryBean.setResources(resource);
        Properties properties = yamlPropertiesFactoryBean.getObject();
        // Read the configured index path
        String luceneDir = properties.getProperty("lucene.index.path");
        System.out.println("---index path----" + luceneDir);
        index = FSDirectory.open(Paths.get(luceneDir));
        // Initialize the Lucene index
        Analyzer analyzer = new IKAnalyzer();
        IndexWriterConfig config = new IndexWriterConfig(analyzer);
        indexWriter = new IndexWriter(index, config);
    }

    @Override
    protected void append(ILoggingEvent event) {
        String message = event.getFormattedMessage();
        String[] split = message.split("@");
        LuceneLogDto luceneLogDto = JSONObject.parseObject(split[1], LuceneLogDto.class);
        Document document = new Document();
        try {
            // Add the fields to the document
            Map<String, String> mdcPropertyMap = event.getMDCPropertyMap();
            String traceId = mdcPropertyMap.get("traceId");
            System.out.println("---traceId---" + traceId);
            if (ObjectUtil.isNotEmpty(traceId)) {
                document.add(new StringField("trace_id", traceId, Field.Store.YES));
            }
            if (ObjectUtil.isNotEmpty(luceneLogDto.getDevice_code())) {
                document.add(new StringField("device_code", luceneLogDto.getDevice_code(), Field.Store.YES));
            }
            if (ObjectUtil.isNotEmpty(luceneLogDto.getContent())) {
                document.add(new StringField("fieldContent", luceneLogDto.getContent(), Field.Store.YES));
            }
            if (ObjectUtil.isNotEmpty(luceneLogDto.getMethod())) {
                document.add(new StringField("method", luceneLogDto.getMethod(), Field.Store.YES));
            }
            if (ObjectUtil.isNotEmpty(luceneLogDto.getStatus_code())) {
                document.add(new StringField("status_code", luceneLogDto.getStatus_code(), Field.Store.YES));
            }
            if (ObjectUtil.isNotEmpty(luceneLogDto.getRequestparam())) {
                document.add(new StringField("requestparam", luceneLogDto.getRequestparam(), Field.Store.YES));
            }
            if (ObjectUtil.isNotEmpty(luceneLogDto.getResponseparam())) {
                document.add(new StringField("responseparam", luceneLogDto.getResponseparam(), Field.Store.YES));
            }
            document.add(new StringField("logType", luceneLogDto.getLogType(), Field.Store.YES));
            document.add(new StringField("logTime", DateUtil.format(new DateTime(), "yyyy-MM-dd HH:mm:ss.SSS"), Field.Store.YES));
            document.add(new NumericDocValuesField("logTime", System.currentTimeMillis())); // used for sorting

            try {
                indexWriter.addDocument(document);
                indexWriter.commit();
            } catch (IOException e) {
                e.printStackTrace();
            }
        } catch (Exception e) {
            return;
        }
    }

    @Override
    public void stop() {
        super.stop();
        try {
            indexWriter.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public void setProperties(LuceneProperties properties) {
        this.properties = properties;

    }
}
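The append() above relies on a message contract of the form "<traceId>@<json>": everything after the first "@" must deserialize into a LuceneLogDto, and AsyncLuceneAppender makes sure the TLog trace id carries the "@" separator. A hedged sketch of a compatible log call, assuming TLog's enhanced appender prefixes the formatted message with that trace label and that the caller is the logger wired to this appender (the field values are made up; only keys matching the getters used above are read):

    // Build the JSON payload with the field names the appender extracts, then log it as the message body.
    JSONObject payload = new JSONObject();
    payload.put("device_code", "AGV01");
    payload.put("content", "task dispatched");
    payload.put("method", "dispatch");
    payload.put("status_code", "200");
    payload.put("logType", "DEVICE_LOG");
    log.info("{}", payload.toJSONString()); // TLog prepends "<traceId>@", so split("@")[1] is the JSON payload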
@@ -0,0 +1,23 @@
package org.nl.modules.lucene.config;

import java.util.ArrayList;
import java.util.List;

public class LuceneProperties {

    private List<Property> properties;

    public LuceneProperties() {
        this.properties = new ArrayList<Property>();
    }

    public List<Property> getProperties() {
        return properties;
    }

    public void addProperty(Property property) {
        properties.add(property);
    }

}
@@ -0,0 +1,38 @@
package org.nl.modules.lucene.config;

import ch.qos.logback.classic.PatternLayout;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.Context;
import ch.qos.logback.core.pattern.PatternLayoutBase;

/*
 * @author ZZQ
 * @Date 2023/12/22 18:11
 */
public class LucenePropertyAndEncoder {

    private Property property;

    private PatternLayoutBase layout = new PatternLayout();

    public LucenePropertyAndEncoder(Property property, Context context) {
        this.property = property;
        this.layout.setContext(context);
        this.layout.setPattern(String.valueOf(property.getValue()));
        this.layout.setPostCompileProcessor(null);
        this.layout.start();
    }

    public String encode(ILoggingEvent event) {
        return layout.doLayout(event);
    }

    public String getName() {
        return property.getName();
    }

    public boolean allowEmpty() {
        return property.isAllowEmpty();
    }
}
@@ -0,0 +1,44 @@
package org.nl.modules.lucene.config;

/*
 * @author ZZQ
 * @Date 2023/12/26 15:30
 */
public class Property {
    private String name;
    private String value;
    private boolean allowEmpty;

    public Property() {
    }

    public Property(String name, String value, boolean allowEmpty) {
        this.name = name;
        this.value = value;
        this.allowEmpty = allowEmpty;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    public boolean isAllowEmpty() {
        return allowEmpty;
    }

    public void setAllowEmpty(boolean allowEmpty) {
        this.allowEmpty = allowEmpty;
    }
}
@@ -1,21 +0,0 @@
package org.nl.modules.lucene.config;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * @deprecated Initializes static configuration values
 */
@Configuration
public class StaticConfig {
    // Log index directory
    @Value("${lucene.index.path}")
    private String luceneDir;

    @Bean
    public int initStatic() {
        UrlConfig.setLuceneUrl(luceneDir);
        return 0;
    }
}
@@ -1,13 +0,0 @@
package org.nl.modules.lucene.config;

public class UrlConfig {
    public static String luceneUrl;

    public static String getLuceneUrl() {
        return luceneUrl;
    }

    public static void setLuceneUrl(String luceneUrl) {
        UrlConfig.luceneUrl = luceneUrl;
    }
}
@@ -0,0 +1,151 @@
server:
  port: 8010
# Data source configuration
spring:
  datasource:
    druid:
      db-type: com.alibaba.druid.pool.DruidDataSource
      driverClassName: net.sf.log4jdbc.sql.jdbcapi.DriverSpy
      url: jdbc:log4jdbc:mysql://${DB_HOST:127.0.0.1}:${DB_PORT:3306}/${DB_NAME:shangdianke_acs}?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false&useOldAliasMetadataBehavior=true&allowPublicKeyRetrieval=true
      username: ${DB_USER:root}
      password: ${DB_PWD:password}
      # Initial number of connections
      initial-size: 5
      # Minimum number of connections
      min-idle: 15
      # Maximum number of connections
      max-active: 30
      # Abandoned-connection timeout (in seconds)
      remove-abandoned-timeout: 180
      # Timeout for acquiring a connection
      max-wait: 3000
      # Interval between connection validity checks
      time-between-eviction-runs-millis: 60000
      # Minimum time a connection may live in the pool
      min-evictable-idle-time-millis: 300000
      # Maximum time a connection may live in the pool
      max-evictable-idle-time-millis: 900000
      # Whether the idle-connection evictor validates connections; connections failing validation are removed from the pool
      test-while-idle: true
      # Whether connections are validated on borrow; failing connections are removed and another one is fetched
      test-on-borrow: true
      # Whether connections are validated on return
      test-on-return: false
      # Query used to validate connections
      validation-query: select 1
      # Monitoring statistics
      webStatFilter:
        enabled: true
      stat-view-servlet:
        enabled: true
        url-pattern: /druid/*
        reset-enable: false
      filter:
        stat:
          enabled: true
          # Log slow SQL
          log-slow-sql: true
          slow-sql-millis: 1000
          merge-sql: true
        wall:
          config:
            multi-statement-allow: true
  redis:
    # Database index
    database: ${REDIS_DB:15}
    host: ${REDIS_HOST:127.0.0.1}
    port: ${REDIS_PORT:6379}
    password: ${REDIS_PWD:}

# Login settings
login:
  # Login cache
  cache-enable: true
  # Whether to restrict each user to a single login
  single-login: false
  # Captcha
  login-code:
    # Captcha type, see the LoginProperties class
    code-type: arithmetic
    # Captcha validity in minutes
    expiration: 2
    # Captcha width
    width: 111
    # Captcha height
    heigth: 36
    # Content length
    length: 2
    # Font name; leave empty to use the default font
    font-name:
    # Font size
    font-size: 25

# JWT
jwt:
  header: Authorization
  # Token prefix
  token-start-with: Bearer
  # The token must be encoded with a Base64 secret of at least 88 characters
  base64-secret: ZmQ0ZGI5NjQ0MDQwY2I4MjMxY2Y3ZmI3MjdhN2ZmMjNhODViOTg1ZGE0NTBjMGM4NDA5NzYxMjdjOWMwYWRmZTBlZjlhNGY3ZTg4Y2U3YTE1ODVkZDU5Y2Y3OGYwZWE1NzUzNWQ2YjFjZDc0NGMxZWU2MmQ3MjY1NzJmNTE0MzI=
  # Token lifetime, here in milliseconds, default 4 hours; values can be generated at https://www.convertworld.com/zh-hans/time/milliseconds.html
  token-validity-in-seconds: 14400000
  # Online user key
  online-key: online-token-
  # Captcha key
  code-key: code-key-
  # Renewal check window (default 30 minutes, in milliseconds): if the user is active while the token is about to expire, the token is renewed
  detect: 1800000
  # Renewal duration, default 1 hour, in milliseconds
  renew: 3600000

# Whether code generation is allowed; set to false in production
generator:
  enabled: true

# Whether swagger-ui is enabled
swagger:
  enabled: true

# Local IP resolution
ip:
  local-parsing: true

# File storage paths
file:
  mac:
    path: ~/file/
    avatar: ~/avatar/
  linux:
    path: /home/eladmin/file/
    avatar: /home/eladmin/avatar/
  windows:
    path: C:\eladmin\file\
    avatar: C:\eladmin\avatar\
  # File size limits (MB)
  maxSize: 100
  avatarMaxSize: 5
logging:
  file:
    path: /Users/onepiece/myFile/acs
  config: classpath:logback-spring.xml

# Sa-Token configuration
sa-token:
  # Token name (also the cookie name)
  token-name: Authorization
  # Token validity in seconds, default 30 days; -1 means it never expires
  timeout: 2592000
  # Temporary token validity (the token expires after this many seconds of inactivity); -1 disables it
  activity-timeout: -1
  # Whether concurrent logins of the same account are allowed (true: allowed; false: a new login kicks out the old one)
  is-concurrent: true
  # Whether multiple logins of the same account share one token (true: shared; false: a new token per login)
  is-share: false
  # Token style
  token-style: random-128
  # Whether to log operations
  is-log: false
  jwt-secret-key: opsjajisdnnca0sdkksdfaaasdfwwq
  # Token prefix
  token-prefix: Bearer
|||||
<?xml version="1.0" encoding="UTF-8"?> |
|
||||
<included> |
|
||||
<springProperty scope="context" name="logPath" source="logging.file.path" defaultValue="logs"/> |
|
||||
<property name="LOG_HOME" value="${logPath}"/> |
|
||||
<!-- 按照每天生成日志文件 --> |
|
||||
<appender name="AgvNdcOneDeviceDriver" class="ch.qos.logback.core.rolling.RollingFileAppender"> |
|
||||
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy"> |
|
||||
<!--日志文件输出的文件名--> |
|
||||
<FileNamePattern>${LOG_HOME}/AGV驱动与NDC交互/%d{yyyy-MM-dd}.%i.log</FileNamePattern> |
|
||||
<!--日志文件保留天数--> |
|
||||
<maxHistory>15</maxHistory> |
|
||||
<!--单个日志最大容量 至少10MB才能看得出来--> |
|
||||
<maxFileSize>200MB</maxFileSize> |
|
||||
<!--所有日志最多占多大容量--> |
|
||||
<totalSizeCap>2GB</totalSizeCap> |
|
||||
</rollingPolicy> |
|
||||
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder"> |
|
||||
<!--格式化输出:%d表示日期,%thread表示线程名,%-5level:级别从左显示5个字符宽度%msg:日志消息,%n是换行符--> |
|
||||
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern> |
|
||||
<charset>${log.charset}</charset> |
|
||||
</encoder> |
|
||||
|
|
||||
</appender> |
|
||||
|
|
||||
<!-- <logger name="org.nl.start.Init" level="info" additivity="false"> |
|
||||
<appender-ref ref="FILE3"/> |
|
||||
</logger>--> |
|
||||
|
|
||||
<!-- 打印sql --> |
|
||||
<logger name="org.nl.acs.device_driver.basedriver.agv.ndcone.AgvNdcOneDeviceDriver" level="info" additivity="false"> |
|
||||
<appender-ref ref="AgvNdcOneDeviceDriver"/> |
|
||||
</logger> |
|
||||
</included> |
|
@@ -1,41 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<included>
    <springProperty scope="context" name="logPath" source="logging.file.path" defaultValue="logs"/>
    <property name="LOG_HOME" value="${logPath}"/>
    <define name="DEVICECODE" class="org.nl.modules.logging.DeviceCodeDir"/>
    <!-- Roll log files daily -->
    <appender name="FILE_LUCENE" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- Output file name pattern -->
            <FileNamePattern>${LOG_HOME}/lucene/${DEVICECODE}/%d{yyyy-MM-dd}.%i.log</FileNamePattern>
            <!-- Days of log history to keep -->
            <maxHistory>15</maxHistory>
            <!-- Maximum size of a single log file -->
            <maxFileSize>200MB</maxFileSize>
            <!-- Maximum total size of all log files -->
            <totalSizeCap>2GB</totalSizeCap>
        </rollingPolicy>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <!-- Output format: %d date, %thread thread name, %-5level level padded to 5 characters, %msg log message, %n newline -->
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
            <charset>${log.charset}</charset>
        </encoder>

    </appender>

    <appender name="luceneAppender" class="org.nl.modules.lucene.common.LuceneAppender" />
    <appender name="asyncLuceneAppender" class="ch.qos.logback.classic.AsyncAppender">
        <appender-ref ref="luceneAppender" />
        <!-- Queue size; adjust to your needs -->
        <queueSize>512</queueSize>
    </appender>
    <!-- <logger name="org.nl.start.Init" level="info" additivity="false">
        <appender-ref ref="FILE3"/>
    </logger>-->

    <!-- Route LuceneExecuteLogServiceImpl output to the rolling file and the Lucene appender -->
    <logger name="org.nl.modules.lucene.service.impl.LuceneExecuteLogServiceImpl" level="info" additivity="true">
        <appender-ref ref="FILE_LUCENE"/>
        <appender-ref ref="asyncLuceneAppender"/>
    </logger>
</included>
@@ -1,33 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<included>
    <springProperty scope="context" name="logPath" source="logging.file.path" defaultValue="logs"/>
    <property name="LOG_HOME" value="${logPath}"/>
    <!-- Roll log files daily -->
    <appender name="NDCAgvServiceImpl" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- Output file name pattern -->
            <FileNamePattern>${LOG_HOME}/NDCAGV服务/%d{yyyy-MM-dd}.%i.log</FileNamePattern>
            <!-- Days of log history to keep -->
            <maxHistory>15</maxHistory>
            <!-- Maximum size of a single log file -->
            <maxFileSize>200MB</maxFileSize>
            <!-- Maximum total size of all log files -->
            <totalSizeCap>2GB</totalSizeCap>
        </rollingPolicy>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <!-- Output format: %d date, %thread thread name, %-5level level padded to 5 characters, %msg log message, %n newline -->
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
            <charset>${log.charset}</charset>
        </encoder>

    </appender>

    <!-- <logger name="org.nl.start.Init" level="info" additivity="false">
        <appender-ref ref="FILE3"/>
    </logger>-->

    <!-- Route NDCAgvServiceImpl output to its own file -->
    <logger name="org.nl.acs.agv.server.impl.NDCAgvServiceImpl" level="info" additivity="false">
        <appender-ref ref="NDCAgvServiceImpl"/>
    </logger>
</included>
@@ -1,33 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<included>
    <springProperty scope="context" name="logPath" source="logging.file.path" defaultValue="logs"/>
    <property name="LOG_HOME" value="${logPath}"/>
    <!-- Roll log files daily -->
    <appender name="NDCSocketConnectionAutoRun" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- Output file name pattern -->
            <FileNamePattern>${LOG_HOME}/NDC交互日志/%d{yyyy-MM-dd}.%i.log</FileNamePattern>
            <!-- Days of log history to keep -->
            <maxHistory>15</maxHistory>
            <!-- Maximum size of a single log file -->
            <maxFileSize>200MB</maxFileSize>
            <!-- Maximum total size of all log files -->
            <totalSizeCap>2GB</totalSizeCap>
        </rollingPolicy>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <!-- Output format: %d date, %thread thread name, %-5level level padded to 5 characters, %msg log message, %n newline -->
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
            <charset>${log.charset}</charset>
        </encoder>

    </appender>

    <!-- <logger name="org.nl.start.Init" level="info" additivity="false">
        <appender-ref ref="FILE3"/>
    </logger>-->

    <!-- Route NDCSocketConnectionAutoRun output to its own file -->
    <logger name="org.nl.start.auto.run.NDCSocketConnectionAutoRun" level="info" additivity="false">
        <appender-ref ref="NDCSocketConnectionAutoRun"/>
    </logger>
</included>