logstash 拆分事件字段值并分配给@metadata 字段
logstash splits event field values and assign to @metadata field
我有一个 logstash event,它有以下字段:
{
"_index": "logstash-2016.08.09",
"_type": "log",
"_id": "AVZvz2ix",
"_score": null,
"_source": {
"message": "function_name~execute||line_no~128||debug_message~id was not found",
"@version": "1",
"@timestamp": "2016-08-09T14:57:00.147Z",
"beat": {
"hostname": "coredev",
"name": "coredev"
},
"count": 1,
"fields": null,
"input_type": "log",
"offset": 22299196,
"source": "/project_root/project_1/log/core.log",
"type": "log",
"host": "coredev",
"tags": [
"beats_input_codec_plain_applied"
]
},
"fields": {
"@timestamp": [
1470754620147
]
},
"sort": [
1470754620147
]
}
我想知道如何使用 filter(也许 kv?)从 "source": "/project_root/project_1/log/core.log" 中提取 core.log,并将其放入例如 [@metadata][log_type] 这样的字段中,这样我之后就可以在 output 中使用 log_type 来创建一个唯一的 index(由主机名 + 日志类型 + 时间戳组成),例如:
output {
elasticsearch {
hosts => "localhost:9200"
manage_template => false
index => "%{[@metadata][_source][host]}-%{[@metadata][log_type]}-%{+YYYY.MM.dd}"
document_type => "%{[@metadata][type]}"
}
stdout { codec => rubydebug }
}
您可以利用 mutate/gsub filter 来实现此目的:
filter {
# add the log_type metadata field
mutate {
add_field => {"[@metadata][log_type]" => "%{source}"}
}
# remove everything up to the last slash
mutate {
gsub => [ "[@metadata][log_type]", "^.*\/", "" ]
}
}
然后您可以像这样修改 elasticsearch 输出:
output {
elasticsearch {
hosts => ["localhost:9200"]
manage_template => false
index => "%{host}-%{[@metadata][log_type]}-%{+YYYY.MM.dd}"
document_type => "%{[@metadata][type]}"
}
stdout { codec => rubydebug }
}
我有一个 logstash event,它有以下字段:
{
"_index": "logstash-2016.08.09",
"_type": "log",
"_id": "AVZvz2ix",
"_score": null,
"_source": {
"message": "function_name~execute||line_no~128||debug_message~id was not found",
"@version": "1",
"@timestamp": "2016-08-09T14:57:00.147Z",
"beat": {
"hostname": "coredev",
"name": "coredev"
},
"count": 1,
"fields": null,
"input_type": "log",
"offset": 22299196,
"source": "/project_root/project_1/log/core.log",
"type": "log",
"host": "coredev",
"tags": [
"beats_input_codec_plain_applied"
]
},
"fields": {
"@timestamp": [
1470754620147
]
},
"sort": [
1470754620147
]
}
我想知道如何使用 filter(也许 kv?)从 "source": "/project_root/project_1/log/core.log" 中提取 core.log,并将其放入例如 [@metadata][log_type] 这样的字段中,这样我之后就可以在 output 中使用 log_type 来创建一个唯一的 index(由主机名 + 日志类型 + 时间戳组成),例如:
output {
elasticsearch {
hosts => "localhost:9200"
manage_template => false
index => "%{[@metadata][_source][host]}-%{[@metadata][log_type]}-%{+YYYY.MM.dd}"
document_type => "%{[@metadata][type]}"
}
stdout { codec => rubydebug }
}
您可以利用 mutate/gsub filter 来实现此目的:
filter {
# add the log_type metadata field
mutate {
add_field => {"[@metadata][log_type]" => "%{source}"}
}
# remove everything up to the last slash
mutate {
gsub => [ "[@metadata][log_type]", "^.*\/", "" ]
}
}
然后您可以像这样修改 elasticsearch 输出:
output {
elasticsearch {
hosts => ["localhost:9200"]
manage_template => false
index => "%{host}-%{[@metadata][log_type]}-%{+YYYY.MM.dd}"
document_type => "%{[@metadata][type]}"
}
stdout { codec => rubydebug }
}