ELK如何收集群晖存储日志
以下几项需要提前修改:
1、docker-compose.yml文件新增映射5514端口给群晖发送日志使用,默认的5044是给beats客户端使用的,完整的配置文件如下:
version: '3.7'

services:
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:8.4.3
    container_name: elasticsearch
    restart: unless-stopped
    environment:
      - node.name=elasticsearch
      - discovery.type=single-node
      - xpack.security.enabled=true
      - ELASTIC_PASSWORD=修改为自己设置的密码
      - bootstrap.memory_lock=true
      - ES_JAVA_OPTS=-Xms1g -Xmx1g
    ports:
      - "9200:9200"
      - "9300:9300"
    volumes:
      - esdata:/usr/share/elasticsearch/data
    ulimits:
      memlock:
        soft: -1
        hard: -1
    networks:
      - elk

  kibana:
    image: docker.elastic.co/kibana/kibana:8.4.3
    container_name: kibana
    restart: unless-stopped
    environment:
      - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
      - ELASTICSEARCH_SERVICEACCOUNTTOKEN=修改为步骤2的服务账号令牌
    ports:
      - "5601:5601"
    depends_on:
      - elasticsearch
    networks:
      - elk

  logstash:
    image: docker.elastic.co/logstash/logstash:8.4.3
    container_name: logstash
    restart: unless-stopped
    environment:
      - xpack.monitoring.enabled=true
      - xpack.monitoring.elasticsearch.username=修改为步骤三创建的账号
      - xpack.monitoring.elasticsearch.password=修改为步骤三创建的密码
      - xpack.monitoring.elasticsearch.hosts=http://elasticsearch:9200
    ports:
      - "5044:5044"      # Beats input (Filebeat / Winlogbeat)
      - "5514:5514"      # Synology syslog over TCP
      # FIX: Compose maps TCP only by default; the UDP syslog input on 5514
      # needs its own /udp mapping, otherwise UDP log sending silently fails.
      - "5514:5514/udp"
      - "9600:9600"      # Logstash monitoring API
    volumes:
      - ./logstash/pipeline:/usr/share/logstash/pipeline
    depends_on:
      - elasticsearch
    networks:
      - elk

volumes:
  esdata:

networks:
  elk:
    driver: bridge
2、logstash.conf 中新增群晖的配置,完整的配置文件如下:
##################################################
# 🟢 INPUT: syslog (Synology DSM) + Beats (Filebeat/Winlogbeat)
##################################################
input {
  # Synology DSM sends BSD syslog over TCP or UDP on 5514.
  tcp {
    port => 5514
    type => "syslog"
  }
  udp {
    port => 5514
    type => "syslog"
  }
  # Filebeat / Winlogbeat ship on the standard Beats port.
  beats {
    port => 5044
    ssl => false
    type => "beats"
  }
}

##################################################
# 🧠 FILTER: route events by source type
##################################################
filter {
  ##################################################
  # 🧩 Synology (syslog) processing
  ##################################################
  if [type] == "syslog" {
    grok {
      # FIX: the original pattern was split across lines by stray
      # line-continuation backslashes and double-escaped brackets and
      # could not compile; this is the intended single-line pattern.
      match => {
        "message" => "%{SYSLOGTIMESTAMP:timestamp} %{HOSTNAME:host} %{DATA:process}(?:\[%{NUMBER:pid}\])?: %{GREEDYDATA:msg}"
      }
    }
    # FIX: parse the syslog timestamp BEFORE dropping the field — the
    # original ran mutate{remove_field => ["timestamp"]} first, so this
    # date{} could never match and events kept the ingest time.
    date {
      # "MMM  d" (double space) covers single-digit days in BSD syslog.
      match => ["timestamp", "MMM dd HH:mm:ss", "MMM  d HH:mm:ss"]
      target => "@timestamp"
    }
    mutate {
      # Nested ECS-style [host][name] instead of a literal "host.name"
      # key, so the %{[host][name]} sprintf in the beats output resolves.
      rename => { "host" => "[host][name]" }
      add_field => {
        # FIX: a "event.module" key creates a top-level field literally
        # named "event.module"; nested syntax creates [event][module],
        # which is what the output conditional below tests.
        "[event][module]" => "synology"
        "[log][level]" => "info"
      }
      remove_field => ["timestamp"]
    }
    # ✅ type conversions (example fields; no-ops when the field is absent)
    mutate {
      convert => {
        "status_code" => "integer"
        "duration" => "float"
        "success" => "boolean"
      }
      lowercase => [ "printer_name" ]
    }
  }
  ##################################################
  # 🧩 Beats processing (Filebeat / Winlogbeat)
  ##################################################
  else if [type] == "beats" {
    # 🌐 Nginx access-log parsing
    if [event][module] == "nginx" {
      grok {
        match => { "message" => "%{COMBINEDAPACHELOG}" }
        overwrite => [ "message" ]
      }
    }
    # 🖥️ System log parsing
    if [event][module] == "system" {
      grok {
        match => {
          "message" => [
            "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_host} %{DATA:syslog_program}: %{GREEDYDATA:syslog_message}"
          ]
        }
      }
    }
    # 🔧 common metadata
    # NOTE(review): %{environment} stays a literal string unless an
    # "environment" field exists on the event — confirm the shipper adds it.
    mutate {
      add_field => {
        "[@metadata][host]" => "%{[host][name]}"
        "[@metadata][env]" => "%{environment}"
      }
    }
    date {
      match => [ "timestamp", "ISO8601" ]
      target => "@timestamp"
    }
    # ✅ type conversions (example fields)
    mutate {
      convert => {
        "status_code" => "integer"
        "duration" => "float"
        "success" => "boolean"
      }
      lowercase => [ "printer_name" ]
    }
  }
  ##################################################
  # ❌ drop any other type (keeps the indices clean)
  ##################################################
  else {
    drop {}
  }
}

##################################################
# 📤 OUTPUT: write each source to its own index
##################################################
output {
  # Synology logs
  # FIX: [event.module] referenced a top-level field literally named
  # "event.module"; the filter above sets nested [event][module].
  if [event][module] == "synology" {
    elasticsearch {
      hosts => ["http://elasticsearch:9200"]
      index => "synology-new-logs-%{+YYYY.MM.dd}"
      user => "修改为步骤三创建的账号"
      password => "修改为步骤三创建的密码"
    }
  }
  # Beats logs (hostname + environment + module)
  else if [type] == "beats" {
    elasticsearch {
      hosts => ["http://elasticsearch:9200"]
      index => "%{[@metadata][host]}-%{[@metadata][env]}-%{[event][module]}-logs-%{+YYYY.MM.dd}"
      user => "修改为步骤三创建的账号"
      password => "修改为步骤三创建的密码"
    }
  }
}
🎯 推荐架构
| 日志来源 | 协议类型 | Logstash 端口 | 推荐方式 |
|---|---|---|---|
| 群晖 DSM | Syslog | 5514 | ✅ 使用 TCP/UDP |
| Windows | Winlogbeat | 5044 | ✅ 使用 Beats |
| Linux | Filebeat | 5044 | ✅ 使用 Beats |
🧠群晖 DSM 中配置日志发送
- 控制面板 → 系统日志 → 日志发送
- 设置服务器地址为 Logstash 容器所在主机 IP
- 端口为 5514
- 协议为 TCP 或 UDP(推荐 TCP)