标签:elasticsearch logstash kibana
java -version java version "1.7.0_45" Java(TM) SE Runtime Environment (build 1.7.0_45-b18) Java HotSpot(TM) 64-Bit Server VM (build 24.45-b08, mixed mode)
# Download, unpack, and start Elasticsearch 1.5.1
curl -O https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.5.1.tar.gz
tar zxvf elasticsearch-1.5.1.tar.gz
cd elasticsearch-1.5.1/
./bin/elasticsearch
# Download and unpack Logstash 1.5.1
curl -O http://download.elastic.co/logstash/logstash/logstash-1.5.1.tar.gz
# Fix: archive name must match the downloaded 1.5.1 tarball
# (original said logstash-1.4.2.tar.gz, which was never downloaded)
tar zxvf logstash-1.5.1.tar.gz
cd logstash-1.5.1
# Smoke-test Logstash: read events from stdin and echo them to stdout
bin/logstash -e 'input { stdin { } } output { stdout {} }'
# Same pipeline, but pretty-print each event with the rubydebug codec
bin/logstash -e 'input { stdin { } } output { stdout { codec => rubydebug } }'
goodnight moon { "message" => "goodnight moon", "@timestamp" => "2013-11-20T23:48:05.335Z", "@version" => "1", "host" => "my-laptop" }
# Logstash pipeline: tail two Tomcat log files, parse them, and ship to Elasticsearch.
input {
  file {
    type => "all"
    path => "/root/tomcat7/logs/catalina.out"
  }
  file {
    type => "access"
    path => "/root/tomcat7/logs/access.log"
  }
}

filter {
  # Join continuation lines (lines not starting with '[') onto the previous event
  multiline {
    pattern => "^[^\[]"
    what => "previous"
  }

  if [type] == "access" {
    # Capture the trailing JSON payload of the access-log line
    grok {
      pattern => "(?<request_info>{.*}$)"
    }
    # Fix: field reference should be quoted ("request_info", was bare request_info)
    json {
      source => "request_info"
    }
    geoip {
      source => "client_ip"
      fields => ["country_name", "region_name", "city_name", "real_region_name", "latitude", "longitude"]
      remove_field => [ "[geoip][longitude]", "[geoip][latitude]", "location", "region_name" ]
    }
    useragent {
      source => "user_agent"
      prefix => "useragent_"
      # Fix: "useragent_o s_major" in the original was a line-wrap artifact of "useragent_os_major"
      remove_field => [ "useragent_device", "useragent_major", "useragent_minor", "useragent_patch", "useragent_os", "useragent_os_major", "useragent_os_minor" ]
    }
  } else if [type] == "all" {
    # Extract log level and timestamp from catalina.out entries,
    # e.g. "[INFO ] 2015-06-01 12:00:00,123"
    grok {
      pattern => "\[(?<level>\w*).*\] (?<datetime>\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2},\d{3})\s"
    }
  }

  mutate {
    remove_field => [ "request_info", "@version", "tags" ]
    remove_tag => [ "_grokparsefailure" ]
    replace => [ "host", "gd1_prd_yowoo_tomcat4" ]
  }
}

output {
  stdout { codec => rubydebug }
  elasticsearch {
    host => "localhost"
    index => "logstash-%{type}-%{+YYYY.MM.dd}"
    index_type => "%{type}"
  }
}
# Run Logstash with the pipeline configuration defined above
sh logstash -f /root/config/logs.conf
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">

  <!-- all log for console -->
  <appender name="console" class="org.apache.log4j.ConsoleAppender">
    <layout class="org.apache.log4j.PatternLayout">
      <param name="ConversionPattern" value="[%-5p] %d{yyyy-MM-dd HH:mm:ss,SSS} %l %M - %m%n" />
    </layout>
  </appender>

  <!-- access log, rolled daily; AccessLogFilter selects the access events -->
  <appender name="access" class="org.apache.log4j.DailyRollingFileAppender">
    <layout class="org.apache.log4j.PatternLayout">
      <param name="ConversionPattern" value="[%-5p] %d{yyyy-MM-dd HH:mm:ss,SSS} - %m%n" />
    </layout>
    <param name="Append" value="true" />
    <!-- Fix: removed a stray HTML <span> injected by the blog scrape and
         restored the self-closing "/>" on the File param -->
    <param name="File" value="/root/tomcat7/logs/access.log" />
    <param name="DatePattern" value="'.'yyyy-MM-dd'.'" />
    <filter class="com.lives.platform.common.log.AccessLogFilter" />
  </appender>

  <root>
    <priority value="debug" />
    <appender-ref ref="console" />
    <appender-ref ref="access" />
  </root>
</log4j:configuration>
# Download Kibana 4.1.0 (64-bit Linux build)
wget https://download.elastic.co/kibana/kibana/kibana-4.1.0-linux-x64.tar.gz
# Kibana is served by a back end server. This controls which port to use.
port: 5601

# The host to bind the server to.
host: "0.0.0.0"

# The Elasticsearch instance to use for all your queries.
elasticsearch_url: "http://localhost:9200"
Elasticsearch+Kibana+Logstash 搭建日志平台
标签:elasticsearch logstash kibana
原文地址:http://blog.csdn.net/u014201191/article/details/46537543