文本数据怎么批量导入Elasticsearch
1个回答
2015-04-14 · 知道合伙人数码行家
关注
展开全部
# Logstash pipeline: batch-import a plain-text log file into Elasticsearch.
input {
  file {
    type => "log"
    path => "/home/hadoop/xinwang_XW351464_2110.log"
    # BUG FIX: the file input defaults to start_position => "end" (tail mode),
    # so lines that already exist in the file are never read. For a one-off
    # batch import of existing data, read from the beginning of the file.
    start_position => "beginning"
  }
}

filter {
  # Apply apache parsing only to events coming from the target log file.
  if [path] =~ "xinwang_XW351464_2110" {
    mutate { replace => { "type" => "apache_access" } }
    grok {
      # COMBINEDAPACHELOG extracts clientip, timestamp, verb, request, etc.
      match => { "message" => "%{COMBINEDAPACHELOG}" }
    }
  }
  # Use the parsed apache timestamp as the event's @timestamp.
  date {
    match => [ "timestamp" , "dd/MMM/yyyy:HH:mm:ss Z" ]
  }
}

output {
  elasticsearch {
    #cluster => "logstash_ela"
    #node_name => "es_master"
    host => "192.168.1.152"
    index => "eslsg"
    index_type => "type"
    # Talk to ES over its HTTP API rather than the node/transport protocol,
    # which avoids ES-version coupling with Logstash 1.4.2.
    protocol => "http"
    port => 9200
    workers => 1
  }
}
执行 ./logstash agent -v -f txtTes.conf 的时候出现:
Grok loading patterns from file
{:path=>"/home/hadoop/logstash-1.4.2/patterns/postgresql",
:level=>:info}
Grok loading patterns from file
{:path=>"/home/hadoop/logstash-1.4.2/patterns/mongodb",
:level=>:info}
Grok loading patterns from file
{:path=>"/home/hadoop/logstash-1.4.2/patterns/mcollective",
:level=>:info}
Grok loading patterns from file
{:path=>"/home/hadoop/logstash-1.4.2/patterns/redis",
:level=>:info}
Grok loading patterns from file
{:path=>"/home/hadoop/logstash-1.4.2/patterns/java",
:level=>:info}
Grok loading patterns from file
{:path=>"/home/hadoop/logstash-1.4.2/patterns/ruby",
:level=>:info}
Grok loading patterns from file
{:path=>"/home/hadoop/logstash-1.4.2/patterns/junos",
:level=>:info}
Match data
{:match=>{"message"=>"%{COMBINEDAPACHELOG}"}, :level=>:info}
Grok
compile {:field=>"message", :patterns=>["%{COMBINEDAPACHELOG}"],
:level=>:info}
Pipeline started {:level=>:info}
New Elasticsearch
output {:cluster=>nil, :host=>"192.168.1.152", :port=>9200,
:embedded=>false, :protocol=>"http", :level=>:info}
Automatic
template management enabled {:manage_template=>"true",
:level=>:info}
Using mapping template {:template=>"{ \"template\" :
\"logstash-*\", \"settings\" : { \"index.refresh_interval\" : \"5s\" },
\"mappings\" : { \"_default_\" : { \"_all\" : {\"enabled\" : true},
\"dynamic_templates\" : [ { \"string_fields\" : { \"match\" : \"*\",
\"match_mapping_type\" : \"string\", \"mapping\" : { \"type\" : \"string\",
\"index\" : \"analyzed\", \"omit_norms\" : true, \"fields\" : { \"raw\" :
{\"type\": \"string\", \"index\" : \"not_analyzed\", \"ignore_above\" : 256} } }
} } ], \"properties\" : { \"@version\": { \"type\": \"string\", \"index\":
\"not_analyzed\" }, \"geoip\" : { \"type\" : \"object\", \"dynamic\": true,
\"path\": \"full\", \"properties\" : { \"location\" : { \"type\" : \"geo_point\"
} } } } } }}", :level=>:info}
file {
type => "log"
#stat_interval => "\t"
path
=> "/home/hadoop/xinwang_XW351464_2110.log"
}
}
filter {
if
[path] =~ "xinwang_XW351464_2110" {
mutate { replace => { "type" =>
"apache_access" } }
grok {
match => { "message" =>
"%{COMBINEDAPACHELOG}" }
}
}
date {
match => [ "timestamp" ,
"dd/MMM/yyyy:HH:mm:ss Z" ]
}
}
output {
elasticsearch
{
#cluster => "logstash_ela"
#node_name=> "es_master"
host =>
"192.168.1.152"
index => "eslsg"
index_type => "type"
protocol
=> "http"
port => 9200
workers => 1
}
}
执行 ./logstash agent -v -f txtTes.conf 的时候出现:
Grok loading patterns from file
{:path=>"/home/hadoop/logstash-1.4.2/patterns/postgresql",
:level=>:info}
Grok loading patterns from file
{:path=>"/home/hadoop/logstash-1.4.2/patterns/mongodb",
:level=>:info}
Grok loading patterns from file
{:path=>"/home/hadoop/logstash-1.4.2/patterns/mcollective",
:level=>:info}
Grok loading patterns from file
{:path=>"/home/hadoop/logstash-1.4.2/patterns/redis",
:level=>:info}
Grok loading patterns from file
{:path=>"/home/hadoop/logstash-1.4.2/patterns/java",
:level=>:info}
Grok loading patterns from file
{:path=>"/home/hadoop/logstash-1.4.2/patterns/ruby",
:level=>:info}
Grok loading patterns from file
{:path=>"/home/hadoop/logstash-1.4.2/patterns/junos",
:level=>:info}
Match data
{:match=>{"message"=>"%{COMBINEDAPACHELOG}"}, :level=>:info}
Grok
compile {:field=>"message", :patterns=>["%{COMBINEDAPACHELOG}"],
:level=>:info}
Pipeline started {:level=>:info}
New Elasticsearch
output {:cluster=>nil, :host=>"192.168.1.152", :port=>9200,
:embedded=>false, :protocol=>"http", :level=>:info}
Automatic
template management enabled {:manage_template=>"true",
:level=>:info}
Using mapping template {:template=>"{ \"template\" :
\"logstash-*\", \"settings\" : { \"index.refresh_interval\" : \"5s\" },
\"mappings\" : { \"_default_\" : { \"_all\" : {\"enabled\" : true},
\"dynamic_templates\" : [ { \"string_fields\" : { \"match\" : \"*\",
\"match_mapping_type\" : \"string\", \"mapping\" : { \"type\" : \"string\",
\"index\" : \"analyzed\", \"omit_norms\" : true, \"fields\" : { \"raw\" :
{\"type\": \"string\", \"index\" : \"not_analyzed\", \"ignore_above\" : 256} } }
} } ], \"properties\" : { \"@version\": { \"type\": \"string\", \"index\":
\"not_analyzed\" }, \"geoip\" : { \"type\" : \"object\", \"dynamic\": true,
\"path\": \"full\", \"properties\" : { \"location\" : { \"type\" : \"geo_point\"
} } } } } }}", :level=>:info}
本回答被提问者和网友采纳
已赞过
已踩过
评论
收起
你对这个回答的评价是?
推荐律师服务:
若未解决您的问题,请您详细描述您的问题,通过百度律临进行免费专业咨询