Setting up ELK log collection with Docker and querying the logs
Install the ELK stack with Docker to collect logs and query the data, then integrate it with Spring Boot to retrieve the entries.
1. Install Elasticsearch with Docker
docker pull elasticsearch:7.6.2
mkdir -p /mydata/elasticsearch/plugins /mydata/elasticsearch/data
chmod 777 /mydata/elasticsearch/data/
docker run -p 9200:9200 -p 9300:9300 --name elasticsearch \
-e "discovery.type=single-node" \
-e "cluster.name=elasticsearch" \
-v /mydata/elasticsearch/plugins:/usr/share/elasticsearch/plugins \
-v /mydata/elasticsearch/data:/usr/share/elasticsearch/data \
-d elasticsearch:7.6.2
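Before moving on, it is worth checking that the node actually came up. A minimal sketch in Java, assuming the container runs on localhost with port 9200 mapped as above:
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class EsHealthCheck {
    public static void main(String[] args) throws Exception {
        // GET / returns basic node info (name, cluster, version) once Elasticsearch is up
        HttpURLConnection conn = (HttpURLConnection) new URL("http://localhost:9200/").openConnection();
        conn.setConnectTimeout(3000);
        try (BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line);
            }
        }
    }
}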
2. Install Logstash with Docker
docker pull logstash:7.6.2
mkdir /mydata/logstash
cd /mydata/logstash
vim logstash.conf
# Place the following configuration in logstash.conf
input {
  tcp {
    mode => "server"
    host => "0.0.0.0"
    port => 4560
    codec => json_lines
    type => "debug"
  }
  tcp {
    mode => "server"
    host => "0.0.0.0"
    port => 4561
    codec => json_lines
    type => "error"
  }
  tcp {
    mode => "server"
    host => "0.0.0.0"
    port => 4562
    codec => json_lines
    type => "business"
  }
  tcp {
    mode => "server"
    host => "0.0.0.0"
    port => 4563
    codec => json_lines
    type => "record"
  }
}
filter {
  if [type] == "record" {
    mutate {
      remove_field => "port"
      remove_field => "host"
      remove_field => "@version"
    }
    json {
      source => "message"
      remove_field => ["message"]
    }
  }
}
output {
  elasticsearch {
    hosts => "es:9200"
    index => "logback-%{type}-%{+YYYY.MM.dd}"
  }
}
docker run --name logstash -p 4560:4560 -p 4561:4561 -p 4562:4562 -p 4563:4563 \
--link elasticsearch:es \
-v /mydata/logstash/logstash.conf:/usr/share/logstash/pipeline/logstash.conf \
-d logstash:7.6.2
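Once the container is running, you can smoke-test one of the TCP inputs without a Spring Boot application. The sketch below sends a single json_lines event to the "record" input on port 4563 (localhost and the orderId/amount fields are assumptions for illustration); the json filter above should unpack the inner JSON into its own fields in the logback-record-* index:
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.net.Socket;
import java.nio.charset.StandardCharsets;

public class LogstashSmokeTest {
    public static void main(String[] args) throws Exception {
        try (Socket socket = new Socket("localhost", 4563);
             PrintWriter out = new PrintWriter(
                     new OutputStreamWriter(socket.getOutputStream(), StandardCharsets.UTF_8))) {
            // One JSON object per line, matching the json_lines codec;
            // the "message" field carries the nested JSON that the filter parses
            out.println("{\"message\": \"{\\\"orderId\\\": \\\"1001\\\", \\\"amount\\\": 42}\"}");
            out.flush();
        }
    }
}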
3. Install Kibana with Docker
docker pull kibana:7.6.2
docker run --name kibana -p 5601:5601 \
--link elasticsearch:es \
-e "ELASTICSEARCH_HOSTS=http://es:9200" \
-d kibana:7.6.2
4. Integrate Logstash with Spring Boot
Add the Maven dependency to pom.xml:
<!-- Logstash integration -->
<dependency>
    <groupId>net.logstash.logback</groupId>
    <artifactId>logstash-logback-encoder</artifactId>
    <version>7.3</version>
</dependency>
Create a logback-spring.xml file under resources with the configuration below, changing ip to the address of your deployed Logstash server:
<?xml version="1.0" encoding="UTF-8"?>
<!-- Logs to the console and ships JSON-formatted logs to Logstash -->
<configuration>
    <include resource="org/springframework/boot/logging/logback/defaults.xml" />
    <springProperty scope="context" name="springAppName"
        source="spring.application.name" />
    <!-- Log output location in the project -->
    <property name="LOG_FILE" value="${BUILD_FOLDER:-build}/${springAppName}" />
    <!-- Console log output pattern -->
    <property name="CONSOLE_LOG_PATTERN"
        value="%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}" />
    <!-- Console output -->
    <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>INFO</level>
        </filter>
        <!-- Log encoding -->
        <encoder>
            <pattern>${CONSOLE_LOG_PATTERN}</pattern>
            <charset>utf8</charset>
        </encoder>
    </appender>
    <!-- Appender that ships JSON-formatted logs to Logstash -->
    <appender name="logstash"
        class="net.logstash.logback.appender.LogstashTcpSocketAppender">
        <destination>ip:4561</destination>
        <!-- Log encoding -->
        <encoder
            class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
            <providers>
                <timestamp>
                    <timeZone>UTC</timeZone>
                </timestamp>
                <pattern>
                    <pattern>
                        {
                        "severity": "%level",
                        "service": "${springAppName:-}",
                        "trace": "%X{X-B3-TraceId:-}",
                        "span": "%X{X-B3-SpanId:-}",
                        "exportable": "%X{X-Span-Export:-}",
                        "pid": "${PID:-}",
                        "thread": "%thread",
                        "class": "%logger{40}",
                        "rest": "%message"
                        }
                    </pattern>
                </pattern>
            </providers>
        </encoder>
    </appender>
    <!-- Root log level -->
    <root level="INFO">
        <appender-ref ref="console" />
        <appender-ref ref="logstash" />
    </root>
</configuration>
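The trace, span and exportable fields in the JSON pattern are read from the SLF4J MDC; Spring Cloud Sleuth fills the X-B3-* keys automatically when it is on the classpath. If you are not using Sleuth, you can populate them yourself. A minimal sketch (the trace id value is made up):
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;

public class MdcExample {

    private static final Logger log = LoggerFactory.getLogger(MdcExample.class);

    public void handleRequest() {
        // The encoder's %X{X-B3-TraceId:-} pattern picks this value up
        MDC.put("X-B3-TraceId", "463ac35c9f6413ad");
        try {
            log.info("processing request"); // "trace" in the shipped JSON now carries the id above
        } finally {
            MDC.remove("X-B3-TraceId");
        }
    }
}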
5. Query the recorded logs
Start the project and write a simple controller that records a log entry. Note that the appender above ships to port 4561, so Logstash tags the events with type "error" and indexes them into logback-error-*:
@RestController
public class TestLogController {

    /**
     * Logger for this controller
     */
    private static final Logger log = LoggerFactory.getLogger(TestLogController.class);

    /**
     * Endpoint that writes a test log entry
     */
    @GetMapping("/testLog")
    public String testLog() {
        log.error("Test log output");
        return "success";
    }
}
Query option 1: via Kibana
Open http://ip:5601/ and create an index pattern that matches the Logstash indices (e.g. logback-*) under Management → Index Patterns.
The recorded log entries can then be browsed in Discover.
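If Discover shows nothing, first confirm that Logstash actually created the daily indices. A minimal sketch using the _cat API (assuming Elasticsearch is reachable on localhost:9200):
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class IndexCheck {
    public static void main(String[] args) throws Exception {
        // Lists the indices produced by the Logstash output, e.g. logback-error-2020.05.01
        URL url = new URL("http://localhost:9200/_cat/indices/logback-*?v");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        try (BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line);
            }
        }
    }
}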
Query option 2: via a REST endpoint
Add the dependencies, an entity class, a service class, and a controller class, then call the endpoint. Note that spring-boot-starter-data-elasticsearch auto-configures the RestHighLevelClient that the service below injects.
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-data-elasticsearch</artifactId>
</dependency>
<dependency>
    <groupId>org.projectlombok</groupId>
    <artifactId>lombok</artifactId>
</dependency>
@Data
public class LogEntry {

    private String date;
    private String message;
    private String type;
    private String service;

    public LogEntry(String date, String message, String type, String service) {
        this.date = date;
        this.message = message;
        this.type = type;
        this.service = service;
    }
}
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

@Service
public class ElasticsearchService {

    private final RestHighLevelClient client;

    @Autowired
    public ElasticsearchService(RestHighLevelClient client) {
        this.client = client;
    }

    public List<LogEntry> getAllLogs() {
        List<LogEntry> logEntries = new ArrayList<>();
        try {
            // Build the search request: error indices + match-all query + newest first
            SearchRequest searchRequest = new SearchRequest("logback-error-*");
            SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
            searchSourceBuilder.query(QueryBuilders.matchAllQuery());
            searchSourceBuilder.sort("@timestamp", SortOrder.DESC);
            searchRequest.source(searchSourceBuilder);
            // Execute the search request
            SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
            // Map each hit onto a LogEntry
            for (SearchHit hit : searchResponse.getHits().getHits()) {
                Map<String, Object> sourceAsMap = hit.getSourceAsMap();
                String date = (String) sourceAsMap.get("@timestamp");
                String type = (String) sourceAsMap.get("type");
                String rest = (String) sourceAsMap.get("rest");
                String service = (String) sourceAsMap.get("service");
                logEntries.add(new LogEntry(date, rest, type, service));
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return logEntries;
    }
}
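If you only need recent entries, the match-all query can be swapped for a date-math range query on @timestamp. A minimal sketch of a variant method for the same service class (the one-hour window is an arbitrary example):
public List<LogEntry> getRecentLogs() {
    List<LogEntry> logEntries = new ArrayList<>();
    try {
        SearchRequest searchRequest = new SearchRequest("logback-error-*");
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
        // Only match documents whose timestamp falls within the last hour
        searchSourceBuilder.query(QueryBuilders.rangeQuery("@timestamp").gte("now-1h"));
        searchSourceBuilder.sort("@timestamp", SortOrder.DESC);
        searchRequest.source(searchSourceBuilder);
        SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
        for (SearchHit hit : searchResponse.getHits().getHits()) {
            Map<String, Object> sourceAsMap = hit.getSourceAsMap();
            logEntries.add(new LogEntry(
                    (String) sourceAsMap.get("@timestamp"),
                    (String) sourceAsMap.get("rest"),
                    (String) sourceAsMap.get("type"),
                    (String) sourceAsMap.get("service")));
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    return logEntries;
}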
@RestController
@RequestMapping("/api")
public class ElasticsearchController {

    private final ElasticsearchService elasticsearchService;

    @Autowired
    public ElasticsearchController(ElasticsearchService elasticsearchService) {
        this.elasticsearchService = elasticsearchService;
    }

    @GetMapping("/search")
    public List<LogEntry> searchAllData() {
        return elasticsearchService.getAllLogs();
    }
}
A GET request to /api/search now returns the recorded log entries as JSON.