Installing ELK with docker-compose
Create the directories and configuration file
cd /
mkdir elk
mkdir /elk/logstash
touch /elk/logstash/logstash.conf
cd /elk/logstash/
vim logstash.conf
Add the following configuration:
input {
  # Collect JSON documents from the Redis list stored under the key uav:data
  redis {
    # Address of the Redis server as seen from inside the Logstash container
    host => "127.0.0.1"
    port => 6379
    type => "redis-input"
    data_type => "list"
    key => "uav:data"
    threads => 5
    codec => "json"
  }
  # upms is a Java service; its logback appender ships JSON log lines to port 4560
  tcp {
    add_field => {"service" => "upms"}
    mode => "server"
    host => "0.0.0.0"
    port => 4560
    codec => json_lines
  }
  # The auth service sends its logs to port 4561
  tcp {
    add_field => {"service" => "auth"}
    mode => "server"
    host => "0.0.0.0"
    port => 4561
    codec => json_lines
  }
}
output {
  if [service] == "upms" {
    elasticsearch {
      # "es" is the link alias for the elasticsearch container in docker-compose.yml
      hosts => "es:9200"
      index => "upms-logstash-%{+YYYY.MM.dd}"
    }
  } else if [service] == "auth" {
    elasticsearch {
      hosts => "es:9200"
      index => "auth-logstash-%{+YYYY.MM.dd}"
    }
  } else {
    # Everything else (the Redis input) goes to the redis-uav index
    elasticsearch {
      hosts => "es:9200"
      index => "redis-uav-%{+YYYY.MM.dd}"
    }
  }
}
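The redis input above consumes JSON documents from the list stored under the key uav:data. Once the stack is running, you can push a test entry and confirm it lands in the redis-uav index; a minimal smoke test, assuming redis-cli is available on the host, Redis listens on 127.0.0.1:6379, and the payload fields are invented for illustration:
# Push one JSON document onto the uav:data list for Logstash to pick up
redis-cli -h 127.0.0.1 -p 6379 LPUSH uav:data '{"uavId":"test-001","msg":"hello from redis"}'
# A few seconds later the document should appear in today's redis-uav index
curl 'http://127.0.0.1:9200/redis-uav-*/_search?pretty'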
Create the docker-compose file
touch /elk/docker-compose.yml
version: '3.7'
services:
  elasticsearch:
    image: elasticsearch:7.2.0
    container_name: elasticsearch
    privileged: true
    user: root
    environment:
      # Set the cluster name to elasticsearch
      - cluster.name=elasticsearch
      # Start as a single node
      - discovery.type=single-node
      # Limit the JVM heap size
      - ES_JAVA_OPTS=-Xms512m -Xmx512m
    volumes:
      - /elk/elasticsearch/plugins:/usr/share/elasticsearch/plugins
      - /elk/elasticsearch/data:/usr/share/elasticsearch/data
    ports:
      - 9200:9200
      - 9300:9300
  logstash:
    image: logstash:7.2.0
    container_name: logstash
    ports:
      - 4560:4560
      # Also expose the port used by the auth TCP input
      - 4561:4561
    environment:
      - TZ=Asia/Shanghai
    volumes:
      # Mount the logstash pipeline configuration
      - /elk/logstash/logstash.conf:/usr/share/logstash/pipeline/logstash.conf
    depends_on:
      - elasticsearch
    links:
      # The elasticsearch service can be reached under the hostname "es"
      - elasticsearch:es
  kibana:
    image: kibana:7.2.0
    container_name: kibana
    ports:
      - 5601:5601
    links:
      # The elasticsearch service can be reached under the hostname "es"
      - elasticsearch:es
    depends_on:
      # Start kibana only after elasticsearch
      - elasticsearch
    environment:
      # Address Kibana uses to reach elasticsearch (env-var form of elasticsearch.hosts)
      - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
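With the file saved, a quick sanity check catches YAML indentation mistakes before anything is started (assuming docker-compose is already installed on the host):
# Print the fully resolved compose file; YAML errors are reported here
docker-compose -f /elk/docker-compose.yml config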
Grant permissions on the data directory, start the services, and install the plugin
mkdir -p /elk/elasticsearch/data
mkdir -p /elk/elasticsearch/plugins
chmod 777 /elk/elasticsearch/data
cd /elk
docker-compose up -d
# Install the json_lines codec plugin for logstash
# Enter the logstash container
docker exec -it logstash /bin/bash
# Go to the logstash bin directory
cd /usr/share/logstash/bin
# Install the plugin
./logstash-plugin install logstash-codec-json_lines
# Exit the container
exit
# Restart the logstash container
docker restart logstash
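After the restart, a few checks against the ports mapped above confirm that the containers are healthy and that Elasticsearch and Kibana are reachable (addresses assume you are on the Docker host):
# All three containers should be listed as Up
docker ps
# Elasticsearch answers with its cluster and version info
curl http://127.0.0.1:9200
# Indices created by Logstash will show up here once data arrives
curl 'http://127.0.0.1:9200/_cat/indices?v'
# Kibana UI: open http://<host-ip>:5601 in a browser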
Integrating a service with Logstash (using the UPMS module as an example)
- Add the pom dependency
<!-- Logstash integration -->
<dependency>
    <groupId>net.logstash.logback</groupId>
    <artifactId>logstash-logback-encoder</artifactId>
    <version>5.3</version>
</dependency>
- Add a new appender to logback-spring.xml
<!-- Appender that ships logs to logstash -->
<appender name="LOGSTASH" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
    <!-- Address and port of the reachable logstash log collection endpoint -->
    <destination>192.168.0.31:4560</destination>
    <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder"/>
</appender>
<root level="INFO">
    <appender-ref ref="LOGSTASH"/>
</root>
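The appender opens a TCP connection to the json_lines input defined earlier, so every log event written through SLF4J/Logback is forwarded as one JSON line. Before starting the UPMS service you can exercise that path by hand; a sketch assuming nc (netcat) is installed and Logstash is reachable at 192.168.0.31:4560 as configured above:
# Send a single JSON line to the upms TCP input; Logstash adds service=upms and indexes it
echo '{"message":"hello from netcat","level":"INFO"}' | nc 192.168.0.31 4560
# The event should appear in today's upms-logstash index
curl 'http://192.168.0.31:9200/upms-logstash-*/_search?q=message:netcat&pretty'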