Automated Operations with ELK


Operations Pain Points: Typical Scenarios

[Scenario 1]

Dev: "Hey, I just deployed a new feature. Can you check the runtime logs for anything unusual?"

Ops: "Sure, give me a minute."


[Scenario 2]

Dev: "Hey, traffic on endpoint A just dropped off. Can you check the error logs for any ERRORs?"

Ops: "Sure, right away."


[Scenario 3]

Dev: "Hey, can you pull the log files under the LOG directory and send them to me?"

Ops: "Sure, I'll send them over shortly."


Log Maintenance Pain Points

[1] Developers cannot log in to production servers to inspect detailed logs

[2] Every system produces its own logs; the data is scattered and hard to search

[3] Log volumes are large, queries are slow, and the data is not real-time enough


Log collection --> log storage --> log search / display / statistics --> log visualization


Elasticsearch + Logstash + Kibana = ELK Stack


Elasticsearch: log storage / search / analysis

Logstash: log collection

Kibana: log display and visualization


Best development language for log collection: Java


ELK Stack official website: https://www.elastic.co


Elasticsearch documentation: https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html

Logstash documentation: https://www.elastic.co/guide/en/logstash/current/index.html

Kibana documentation: https://www.elastic.co/guide/en/kibana/current/index.html


[1] Elasticsearch overview: built on Lucene, exposes a RESTful web API, written in Java (see the curl sketch below)

[2] Logstash overview: written in Ruby (runs as JRuby on the JVM)

[3] Kibana overview: web front end for searching and visualizing data stored in Elasticsearch
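
Because everything in Elasticsearch is driven through its RESTful HTTP API, the service can be exercised with nothing more than curl. A minimal sketch, assuming a node listening on 192.168.100.129:9200 as configured later in this article:

curl -XGET 'http://192.168.100.129:9200/'                        # basic node and cluster information
curl -XGET 'http://192.168.100.129:9200/_cluster/health?pretty'  # cluster status: green / yellow / red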


Elasticsearch plugins (https://www.elastic.co/guide/en/elasticsearch/plugins/5.5/index.html); a short sketch of the bundled plugin tool follows the list

【1】Head

【2】Kopf

【3】Bigdesk

【4】Analysis-ik

【5】Marvel
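
Starting with 5.x, official plugins are managed with the elasticsearch-plugin tool that ships with the package, while Head and Bigdesk are installed standalone as shown later in this article. A minimal sketch of the tool (not part of the original transcript; analysis-icu is just an example of an official plugin installable by name, whereas third-party plugins such as analysis-ik are installed from a release zip URL):

/usr/share/elasticsearch/bin/elasticsearch-plugin list
/usr/share/elasticsearch/bin/elasticsearch-plugin install analysis-icu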

Install Java
[root@linux-node1 ~]# yum -y install java
[root@linux-node1 ~]# java -version
openjdk version "1.8.0_141"
OpenJDK Runtime Environment (build 1.8.0_141-b16)
OpenJDK 64-Bit Server VM (build 25.141-b16, mixed mode)

Download and install the public signing key
[root@linux-node1 ~]# rpm --import https://artifacts.elastic.co/GPG-KEY-elasticsearch

Installing from the rpm repository
[root@linux-node1 ~]# vim /etc/yum.repos.d/elasticsearch.repo
[elasticsearch-5.x]
name=Elasticsearch repository for 5.x packages
baseurl=https://artifacts.elastic.co/packages/5.x/yum
gpgcheck=1
gpgkey=https://artifacts.elastic.co/GPG-KEY-elasticsearch
enabled=1
autorefresh=1
type=rpm-md

Install Elasticsearch
[root@linux-node1 ~]# yum -y install elasticsearch
[root@linux-node1 ~]# rpm -qa elasticsearch
elasticsearch-5.5.1-1.noarch
[root@linux-node1 ~]# chkconfig --level 3 elasticsearch on
[root@linux-node1 ~]# chkconfig --list elasticsearch
elasticsearch   0:off   1:off   2:on    3:on    4:on    5:on    6:off
[root@linux-node1 ~]# rpm -qc elasticsearch
/etc/elasticsearch/elasticsearch.yml
/etc/elasticsearch/jvm.options
/etc/elasticsearch/log4j2.properties
/etc/elasticsearch/scripts
/etc/init.d/elasticsearch
/etc/sysconfig/elasticsearch
/usr/lib/sysctl.d/elasticsearch.conf
/usr/lib/systemd/system/elasticsearch.service

Configure Elasticsearch
[root@linux-node1 ~]# cd /etc/elasticsearch/
[root@linux-node1 elasticsearch]# cp -a elasticsearch.yml elasticsearch.yml_$(date +%F)
[root@linux-node1 elasticsearch]# vim elasticsearch.yml
17 cluster.name: kw_es
23 node.name: kw_es_node1
33 path.data: /data/elasticsearch
37 path.logs: /var/log/elasticsearch
43 bootstrap.memory_lock: true
44 bootstrap.system_call_filter: false
56 network.host: 192.168.100.129
60 http.port: 9200
69 discovery.zen.ping.unicast.hosts: ["192.168.100.129","192.168.100.130"]
[root@linux-node1 elasticsearch]# grep "^[a-Z]" elasticsearch.yml
cluster.name: kw_es
node.name: kw_es_node1
path.data: /data/elasticsearch
path.logs: /var/log/elasticsearch
bootstrap.memory_lock: true
bootstrap.system_call_filter: false
network.host: 192.168.100.129
http.port: 9200
discovery.zen.ping.unicast.hosts: ["192.168.100.129","192.168.100.130"]
[root@linux-node1 elasticsearch]# mkdir /data/elasticsearch -p
[root@linux-node1 elasticsearch]# ls -ld /data/elasticsearch/
drwxr-xr-x 2 root root 4096 Aug  2 11:42 /data/elasticsearch/
[root@linux-node1 elasticsearch]# chown -R elasticsearch:elasticsearch /data/elasticsearch/
[root@linux-node1 elasticsearch]# ls -ld /data/elasticsearch/ /var/log/elasticsearch/
drwxr-xr-x 2 elasticsearch elasticsearch 4096 Aug  2 11:42 /data/elasticsearch/
drwxr-x--- 2 elasticsearch elasticsearch 4096 Jul 19 04:47 /var/log/elasticsearch/
[root@linux-node1 elasticsearch]# vim /etc/security/limits.conf
*               -       nofile          65536
#Allow User 'elasticsearch' Mlockall
elasticsearch   soft    memlock         unlimited
elasticsearch   hard    memlock         unlimited
[root@linux-node1 elasticsearch]# tail -4 /etc/security/limits.conf
*               -       nofile          65536
#Allow User 'elasticsearch' Mlockall
elasticsearch   soft    memlock         unlimited
elasticsearch   hard    memlock         unlimited
[root@linux-node1 elasticsearch]# vim /etc/security/limits.d/90-nproc.conf
*          soft    nproc     2048
root       soft    nproc     unlimited
[root@linux-node1 elasticsearch]# vim jvm.options
22 -Xms512m
23 -Xmx512m
[root@linux-node1 elasticsearch]# /etc/init.d/elasticsearch start
Starting elasticsearch:                                    [  OK  ]
[root@linux-node1 elasticsearch]# netstat -tnlup|grep "9200"|grep -v "grep"
tcp        0      0 ::ffff:192.168.100.129:9200 :::*                        LISTEN      1492/java
[root@linux-node1 elasticsearch]# ps -ef|grep elasticsearch|grep -v "grep"
498       1332     1 64 11:45 ?        00:00:57 /usr/bin/java -Xms512m -Xmx512m -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=75 -XX:+UseCMSInitiatingOccupancyOnly -XX:+AlwaysPreTouch -server -Xss1m -Djava.awt.headless=true -Dfile.encoding=UTF-8 -Djna.nosys=true -Djdk.io.permissionsUseCanonicalPath=true -Dio.netty.noUnsafe=true -Dio.netty.noKeySetOptimization=true -Dio.netty.recycler.maxCapacityPerThread=0 -Dlog4j.shutdownHookEnabled=false -Dlog4j2.disable.jmx=true -Dlog4j.skipJansi=true -XX:+HeapDumpOnOutOfMemoryError -Des.path.home=/usr/share/elasticsearch -cp /usr/share/elasticsearch/lib/* org.elasticsearch.bootstrap.Elasticsearch -p /var/run/elasticsearch/elasticsearch.pid -d -Edefault.path.logs=/var/log/elasticsearch -Edefault.path.data=/var/lib/elasticsearch -Edefault.path.conf=/etc/elasticsearch
[root@linux-node1 elasticsearch]# lsof -i:9200
COMMAND  PID          USER   FD   TYPE DEVICE SIZE/OFF NODE NAME
java    1332 elasticsearch  113u  IPv6  11705      0t0  TCP linux-node1:wap-wsp (LISTEN)

Access in a browser: http://192.168.100.129:9200
{
  "name" : "kw_es_node1",
  "cluster_name" : "kw_es",
  "cluster_uuid" : "Yfrcb5qkSvevskn8Rfm6Ew",
  "version" : {
    "number" : "5.5.1",
    "build_hash" : "19c13d0",
    "build_date" : "2017-07-18T20:44:24.823Z",
    "build_snapshot" : false,
    "lucene_version" : "6.6.0"
  },
  "tagline" : "You Know, for Search"
}
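
To confirm that the memory-lock settings above (bootstrap.memory_lock plus the limits.conf entries) actually took effect, the node's process info can be queried; this quick check is not part of the original transcript:

curl -XGET 'http://192.168.100.129:9200/_nodes/process?pretty'   # look for "mlockall" : true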

Count the documents stored in Elasticsearch
[root@linux-node1 elasticsearch]# curl -i -XGET 'http://192.168.100.129:9200/_count?pretty' -d '
{
"query": {
      "match_all": {}
 }
}'
HTTP/1.1 200 OK
Content-Type: application/json; charset=UTF-8
Content-Length: 95

{
  "count" : 0,
  "_shards" : {
    "total" : 0,
    "successful" : 0,
    "failed" : 0
  }
}
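
To see the indexes themselves (with per-index document counts) rather than a bare total, the _cat API gives a quick listing; this sketch is not part of the original transcript:

curl -XGET 'http://192.168.100.129:9200/_cat/indices?v'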

Install the Head plugin (Elasticsearch changed substantially in 5.x and no longer supports installing Head directly as a site plugin, but the Head author provides an alternative, standalone installation method)
[root@linux-node1 ~]# tar xvf node-v6.11.2-linux-x64.tar.xz
[root@linux-node1 ~]# mv node-v6.11.2-linux-x64 /usr/local/node
[root@linux-node1 ~]# vim /etc/profile
export NODE_HOME=/usr/local/node
export PATH=$PATH:$NODE_HOME/bin
[root@linux-node1 ~]# source /etc/profile
[root@linux-node1 ~]# which node
/usr/local/node/bin/node
[root@linux-node1 ~]# which npm
/usr/local/node/bin/npm
[root@linux-node1 ~]# node -v
v6.11.2
[root@linux-node1 ~]# npm -v
3.10.10
[root@linux-node1 ~]# git clone git://github.com/mobz/elasticsearch-head.git
Initialized empty Git repository in /root/elasticsearch-head/.git/
remote: Counting objects: 4067, done.
Receiving objects: 100% (4067/4067), 2.10 MiB | 62 KiB/s, done.
remote: Total 4067 (delta 0), reused 0 (delta 0), pack-reused 4067
Resolving deltas: 100% (2224/2224), done.
[root@linux-node1 ~]# cd elasticsearch-head/
[root@linux-node1 elasticsearch-head]# npm install -g grunt --registry=https://registry.npm.taobao.org
[root@linux-node1 elasticsearch-head]# npm install -g cnpm --registry=https://registry.npm.taobao.org
[root@linux-node1 elasticsearch-head]# npm install grunt-contrib-jasmine --registry=https://registry.npm.taobao.org
[root@linux-node1 elasticsearch-head]# vim Gruntfile.js
90                 connect: {
91                         server: {
92                                 options: {
93                                         hostname: '192.168.100.129',
94                                         port: 9100,
95                                         base: '.',
96                                         keepalive: true
97                                 }
98                         }
99                 }
[root@linux-node1 elasticsearch-head]# vim _site/app.js
4329                         this.base_uri = this.config.base_uri || this.prefs.get("app-base_uri") || "http://192.168.100.130:9200";
[root@linux-node1 elasticsearch-head]# vim /etc/elasticsearch/elasticsearch.yml
http.cors.enabled: true
http.cors.allow-origin: "*"
[root@linux-node1 elasticsearch-head]# /root/elasticsearch-head/node_modules/grunt/bin/grunt server &
[1] 26941
[root@linux-node1 elasticsearch-head]# Running "connect:server" (connect) task
Waiting forever...
Started connect web server on http://192.168.100.129:9100
[root@linux-node1 elasticsearch-head]# netstat -tnlup|grep "9100"|grep -v "grep"
tcp        0      0 192.168.100.129:9100        0.0.0.0:*                   LISTEN      26941/grunt
[root@linux-node1 elasticsearch-head]# ps -ef|grep "grunt"|grep -v "grep"
root     26941  1030  3 15:35 pts/0    00:00:03 grunt
[root@linux-node1 elasticsearch-head]# lsof -i:9100
COMMAND   PID USER   FD   TYPE DEVICE SIZE/OFF NODE NAME
grunt   26941 root   12u  IPv4  84661      0t0  TCP linux-node1:jetdirect (LISTEN)
[root@linux-node1 elasticsearch-head]# /etc/init.d/elasticsearch restart
Stopping elasticsearch:                                    [  OK  ]
Starting elasticsearch:                                    [  OK  ]

Access in a browser: http://192.168.100.129:9100
Compound query (index a document through Head's compound-query form)
http://192.168.100.129:9200/
song001/list001/1/			POST
{
   "number": "48817",
   "singer": "林斌",
   "size": "1024"
}

Submit the request

Response:
{
    "_index": "song001",
    "_type": "list001",
    "_id": "1",
    "_version": 1,
    "result": "created",
    "_shards": {
        "total": 2,
        "successful": 1,
        "failed": 0
    },
    "created": true
}

Compound query (fetch the document back)
http://192.168.100.129:9200/
song001/list001/1/			GET
{}
Submit the request

Response:
{
    "_index": "song001",
    "_type": "list001",
    "_id": "1",
    "_version": 1,
    "found": true,
    "_source": {
        "number": "48817",
        "singer": "林斌",
        "size": "1024"
    }
}
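
The same index and fetch operations can also be issued directly against the REST API without Head; a sketch using the same example index/type/id:

curl -XPOST 'http://192.168.100.129:9200/song001/list001/1' -d '{"number": "48817", "singer": "林斌", "size": "1024"}'
curl -XGET 'http://192.168.100.129:9200/song001/list001/1?pretty'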

Install the Bigdesk plugin
[root@linux-node1 ~]# git clone https://github.com/hlstudio/bigdesk
Initialized empty Git repository in /root/bigdesk/.git/
remote: Counting objects: 151, done.
Receiving objects: 100% (151/151), 348.16 KiB | 254 KiB/s, done.
remote: Total 151 (delta 0), reused 0 (delta 0), pack-reused 151
Resolving deltas: 100% (57/57), done.
[root@linux-node1 ~]# cd bigdesk/_site/
[root@linux-node1 _site]# python -m SimpleHTTPServer 52113 &
[2] 27474

Access in a browser: http://192.168.100.129:52113

Configure Elasticsearch Cluster
Install Java
[root@linux-node2 ~]# yum -y install java
[root@linux-node2 ~]# java -version
openjdk version "1.8.0_141"
OpenJDK Runtime Environment (build 1.8.0_141-b16)
OpenJDK 64-Bit Server VM (build 25.141-b16, mixed mode)

Download and install the public signing key
[root@linux-node2 ~]# rpm --import https://artifacts.elastic.co/GPG-KEY-elasticsearch

Installing from the rpm repository
[root@linux-node2 ~]# vim /etc/yum.repos.d/elasticsearch.repo
[elasticsearch-5.x]
name=Elasticsearch repository for 5.x packages
baseurl=https://artifacts.elastic.co/packages/5.x/yum
gpgcheck=1
gpgkey=https://artifacts.elastic.co/GPG-KEY-elasticsearch
enabled=1
autorefresh=1
type=rpm-md

Install Elasticsearch
[root@linux-node2 ~]# yum -y install elasticsearch
[root@linux-node2 ~]# rpm -qa elasticsearch
elasticsearch-5.5.1-1.noarch
[root@linux-node2 ~]# chkconfig --level 3 elasticsearch on
[root@linux-node2 ~]# chkconfig --list elasticsearch
elasticsearch   0:off   1:off   2:on    3:on    4:on    5:on    6:off
[root@linux-node2 ~]# rpm -qc elasticsearch
/etc/elasticsearch/elasticsearch.yml
/etc/elasticsearch/jvm.options
/etc/elasticsearch/log4j2.properties
/etc/elasticsearch/scripts
/etc/init.d/elasticsearch
/etc/sysconfig/elasticsearch
/usr/lib/sysctl.d/elasticsearch.conf
/usr/lib/systemd/system/elasticsearch.service

Configure Elasticsearch
[root@linux-node2 ~]# cd /etc/elasticsearch/
[root@linux-node2 elasticsearch]# cp -a elasticsearch.yml elasticsearch.yml_$(date +%F)
[root@linux-node2 elasticsearch]# vim elasticsearch.yml
17 cluster.name: kw_es
23 node.name: kw_es_node2
33 path.data: /data/elasticsearch
37 path.logs: /var/log/elasticsearch
43 bootstrap.memory_lock: true
44 bootstrap.system_call_filter: false
56 network.host: 192.168.100.130
60 http.port: 9200
69 discovery.zen.ping.unicast.hosts: ["192.168.100.129","192.168.100.130"]
[root@linux-node2 elasticsearch]# grep "^[a-Z]" elasticsearch.yml
cluster.name: kw_es
node.name: kw_es_node2
path.data: /data/elasticsearch
path.logs: /var/log/elasticsearch
bootstrap.memory_lock: true
bootstrap.system_call_filter: false
network.host: 192.168.100.130
http.port: 9200
discovery.zen.ping.unicast.hosts: ["192.168.100.129","192.168.100.130"]
[root@linux-node2 elasticsearch]# mkdir /data/elasticsearch -p
[root@linux-node2 elasticsearch]# ls -ld /data/elasticsearch/
drwxr-xr-x 2 root root 4096 Aug  2 11:42 /data/elasticsearch/
[root@linux-node2 elasticsearch]# chown -R elasticsearch:elasticsearch /data/elasticsearch/
[root@linux-node2 elasticsearch]# ls -ld /data/elasticsearch/ /var/log/elasticsearch/
drwxr-xr-x 2 elasticsearch elasticsearch 4096 Aug  2 11:42 /data/elasticsearch/
drwxr-x--- 2 elasticsearch elasticsearch 4096 Jul 19 04:47 /var/log/elasticsearch/
[root@linux-node2 elasticsearch]# vim /etc/security/limits.conf
*               -       nofile          65536
#Allow User 'elasticsearch' Mlockall
elasticsearch   soft    memlock         unlimited
elasticsearch   hard    memlock         unlimited
[root@linux-node2 elasticsearch]# tail -4 /etc/security/limits.conf
*               -       nofile          65536
#Allow User 'elasticsearch' Mlockall
elasticsearch   soft    memlock         unlimited
elasticsearch   hard    memlock         unlimited
[root@linux-node2 elasticsearch]# vim /etc/security/limits.d/90-nproc.conf
*          soft    nproc     2048
root       soft    nproc     unlimited
[root@linux-node2 elasticsearch]# vim jvm.options
22 -Xms512m
23 -Xmx512m
[root@linux-node2 elasticsearch]# /etc/init.d/elasticsearch start
Starting elasticsearch:                                    [  OK  ]
[root@linux-node2 elasticsearch]# netstat -tnlup|grep "9200"|grep -v "grep"
tcp        0      0 ::ffff:192.168.100.130:9200 :::*                        LISTEN      1492/java
[root@linux-node2 elasticsearch]# ps -ef|grep elasticsearch|grep -v "grep"
498       1332     1 64 11:45 ?        00:00:57 /usr/bin/java -Xms512m -Xmx512m -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=75 -XX:+UseCMSInitiatingOccupancyOnly -XX:+AlwaysPreTouch -server -Xss1m -Djava.awt.headless=true -Dfile.encoding=UTF-8 -Djna.nosys=true -Djdk.io.permissionsUseCanonicalPath=true -Dio.netty.noUnsafe=true -Dio.netty.noKeySetOptimization=true -Dio.netty.recycler.maxCapacityPerThread=0 -Dlog4j.shutdownHookEnabled=false -Dlog4j2.disable.jmx=true -Dlog4j.skipJansi=true -XX:+HeapDumpOnOutOfMemoryError -Des.path.home=/usr/share/elasticsearch -cp /usr/share/elasticsearch/lib/* org.elasticsearch.bootstrap.Elasticsearch -p /var/run/elasticsearch/elasticsearch.pid -d -Edefault.path.logs=/var/log/elasticsearch -Edefault.path.data=/var/lib/elasticsearch -Edefault.path.conf=/etc/elasticsearch
[root@linux-node2 elasticsearch]# lsof -i:9200
COMMAND  PID          USER   FD   TYPE DEVICE SIZE/OFF NODE NAME
java    1332 elasticsearch  113u  IPv6  11705      0t0  TCP linux-node2:wap-wsp (LISTEN)
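
With both nodes started, cluster membership can be verified from either node; this quick check is not part of the original transcript:

curl -XGET 'http://192.168.100.129:9200/_cat/nodes?v'   # should list kw_es_node1 and kw_es_node2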
============================================================================================
Download and install the public signing key
[root@linux-node1 ~]# rpm --import https://artifacts.elastic.co/GPG-KEY-elasticsearch

Installing from the rpm repository
[root@linux-node1 ~]# vim /etc/yum.repos.d/logstash.repo
[logstash-5.x]
name=Elastic repository for 5.x packages
baseurl=https://artifacts.elastic.co/packages/5.x/yum
gpgcheck=1
gpgkey=https://artifacts.elastic.co/GPG-KEY-elasticsearch
enabled=1
autorefresh=1
type=rpm-md

Install Logstash
[root@linux-node1 ~]# yum -y install logstash
[root@linux-node1 ~]# rpm -qa logstash
logstash-5.5.1-1.noarch
[root@linux-node1 ~]# rpm -qc logstash
/etc/logstash/jvm.options
/etc/logstash/logstash.yml
/etc/logstash/startup.options
[root@linux-node1 ~]# cd /etc/logstash/
[root@linux-node1 logstash]# cp -a logstash.yml logstash.yml_$(date +%F)
[root@linux-node1 ~]# vim /etc/logstash/logstash.yml
19 node.name: kw_logstash_node1
28 path.data: /data/logstash
37 pipeline.workers: 2
41 pipeline.output.workers: 1
45 pipeline.batch.size: 125
50 pipeline.batch.delay: 5
64 path.config: /etc/logstash/conf.d
172 http.host: "192.168.100.129"
189 log.level: info
190 path.logs: /var/log/logstash
[root@linux-node1 logstash]# grep "^[a-Z]" logstash.yml
node.name: kw_logstash_node1
path.data: /data/logstash
pipeline.workers: 2
pipeline.output.workers: 1
pipeline.batch.size: 125
pipeline.batch.delay: 5
path.config: /etc/logstash/conf.d
http.host: "192.168.100.129"
log.level: info
path.logs: /var/log/logstash
[root@linux-node1 logstash]# mkdir /data/logstash -p
[root@linux-node1 logstash]# ls -ld /data/logstash/
drwxr-xr-x 2 root root 4096 Aug  3 10:13 /data/logstash/
[root@linux-node1 logstash]# chown -R logstash:logstash /data/logstash/
[root@linux-node1 logstash]# ls -ld /data/logstash/ /var/log/logstash/
drwxr-xr-x 2 logstash logstash 4096 Aug  3 10:13 /data/logstash/
drwxrwxr-x 2 logstash root     4096 Jul 19 05:15 /var/log/logstash/
[root@linux-node1 logstash]# vim jvm.options
6 -Xms512m
7 -Xmx512m
[root@linux-node1 logstash]# ln -s /etc/logstash /usr/share/logstash/config
[root@linux-node1 logstash]# ls -ld /usr/share/logstash/config/
drwxr-xr-x 4 root root 4096 Aug  3 11:04 /usr/share/logstash/config/
[root@linux-node1 logstash]# /usr/share/logstash/bin/logstash -e 'input { stdin{} } output { stdout{} }'
The stdin plugin is now waiting for input:
10:34:34.659 [Api Webserver] INFO  logstash.agent - Successfully started Logstash API endpoint {:port=>9600}
Hello World
2017-08-03T02:39:01.990Z linux-node1 Hello World
[root@linux-node1 logstash]# /usr/share/logstash/bin/logstash -e 'input { stdin{} } output { stdout{ codec => rubydebug } }'
The stdin plugin is now waiting for input:
10:41:53.921 [Api Webserver] INFO  logstash.agent - Successfully started Logstash API endpoint {:port=>9600}
Hello World
{
    "@timestamp" => 2017-08-03T02:43:14.865Z,
      "@version" => "1",
          "host" => "linux-node1",
       "message" => "Hello World"
}
[root@linux-node1 logstash]# /usr/share/logstash/bin/logstash -e 'input { stdin{} } output { elasticsearch { hosts => ["192.168.100.129:9200"] } }'
[root@linux-node1 logstash]# /usr/share/logstash/bin/logstash -e 'input { stdin{} } output { elasticsearch { hosts => ["192.168.100.129:9200"] } stdout { codec => rubydebug } }'
The stdin plugin is now waiting for input:
11:04:43.773 [Api Webserver] INFO  logstash.agent - Successfully started Logstash API endpoint {:port=>9600}
Hello World
{
    "@timestamp" => 2017-08-03T03:05:35.283Z,
      "@version" => "1",
          "host" => "linux-node1",
       "message" => "Hello World"
}
Hello Keywa
{
    "@timestamp" => 2017-08-03T03:05:43.380Z,
      "@version" => "1",
          "host" => "linux-node1",
       "message" => "Hello Keywa"
}

[root@linux-node1 ~]# vim /etc/logstash/conf.d/keywa.logstash.conf
input { stdin { } }
output {
  elasticsearch { hosts => ["192.168.100.129:9200"] }
  stdout { codec => rubydebug }
}
[root@linux-node1 ~]# /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/keywa.logstash.conf
The stdin plugin is now waiting for input:
11:15:46.577 [Api Webserver] INFO  logstash.agent - Successfully started Logstash API endpoint {:port=>9600}
1hehe
{
    "@timestamp" => 2017-08-03T03:22:30.552Z,
      "@version" => "1",
          "host" => "linux-node1",
       "message" => "1hehe"
}
2hehe
{
    "@timestamp" => 2017-08-03T03:22:33.754Z,
      "@version" => "1",
          "host" => "linux-node1",
       "message" => "2hehe"
}

[root@linux-node1 ~]# vim /etc/logstash/conf.d/keywa.logstash.conf
input {

    file {
        path => "/var/log/messages"
        type => "system"
        start_position => "beginning"
    }
}

output {

    elasticsearch {
        hosts => ["192.168.100.129:9200"]
        index => "system-%{+YYYY.MM.dd}"
	}
}
[root@linux-node1 ~]# /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/keywa.logstash.conf
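
Once Logstash has been running for a moment, the new daily index can be confirmed and sampled from the command line; a quick check that is not part of the original transcript:

curl -XGET 'http://192.168.100.129:9200/system-*/_search?pretty&size=1'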

[root@linux-node1 ~]# vim /etc/logstash/conf.d/keywa.logstash.conf
input {

    file {
        path => "/var/log/messages"
        type => "system"
        start_position => "beginning"
    }

    file {
       path => "/var/log/elasticsearch/kw_es.log"
       type => "elasticsearch"
       start_position => "beginning"
    }
}

output {

    if [type] == "system" {
        elasticsearch {
            hosts => ["192.168.100.129:9200"]
            index => "system-%{+YYYY.MM.dd}"
        }
    }

    if [type] == "elasticsearch" {
        elasticsearch {
            hosts => ["192.168.100.129:9200"]
            index => "elasticsearch-%{+YYYY.MM.dd}"
        }
    }
}
[root@linux-node1 ~]# /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/keywa.logstash.conf

[root@linux-node1 ~]# vim /etc/logstash/conf.d/keywa.logstash.conf
input {

    file {
        path => "/var/log/messages"
        type => "system"
        start_position => "beginning"
    }

    file {
        path => "/var/log/elasticsearch/kw_es.log"
        type => "elasticsearch"
        start_position => "beginning"
        codec => multiline {
            pattern => "^\["
            negate => true
            what => "previous"
        }
    }
}

output {

    if [type] == "system" {
        elasticsearch {
            hosts => ["192.168.100.129:9200"]
            index => "system-%{+YYYY.MM.dd}"
        }
    }

    if [type] == "elasticsearch" {
        elasticsearch {
            hosts => ["192.168.100.129:9200"]
            index => "elasticsearch-%{+YYYY.MM.dd}"
        }
    }
}
[root@linux-node1 ~]# /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/keywa.logstash.conf
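
Multiline patterns are easy to get wrong, so it is worth validating the configuration before starting the pipeline. Logstash 5.x supports a test-and-exit flag for exactly this; a sketch that is not part of the original transcript:

/usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/keywa.logstash.conf --config.test_and_exit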
============================================================================================
Download and install the public signing key
[root@linux-node1 ~]# rpm --import https://artifacts.elastic.co/GPG-KEY-elasticsearch

Installing from the RPM repository
[root@linux-node1 ~]# vim /etc/yum.repos.d/kibana.repo
[kibana-5.x]
name=Kibana repository for 5.x packages
baseurl=https://artifacts.elastic.co/packages/5.x/yum
gpgcheck=1
gpgkey=https://artifacts.elastic.co/GPG-KEY-elasticsearch
enabled=1
autorefresh=1
type=rpm-md

Install Kibana
[root@linux-node1 ~]# yum -y install kibana
[root@linux-node1 ~]# rpm -qa kibana
kibana-5.5.1-1.x86_64
[root@linux-node1 ~]# rpm -qc kibana
/etc/kibana/kibana.yml
[root@linux-node1 ~]# cd /etc/kibana/
[root@linux-node1 kibana]# cp -a kibana.yml kibana.yml_$(date +%F)
[root@linux-node1 kibana]# vim kibana.yml
2 server.port: 5601
7 server.host: "192.168.100.129"
18 server.name: "kw_kibana"
21 elasticsearch.url: "http://192.168.100.129:9200"
30 kibana.index: ".kibana"
83 pid.file: /var/run/kibana/kibana.pid
[root@linux-node1 kibana]# grep "^[a-Z]" kibana.yml
server.port: 5601
server.host: "192.168.100.129"
server.name: "kw_kibana"
elasticsearch.url: "http://192.168.100.129:9200"
kibana.index: ".kibana"
pid.file: /var/run/kibana/kibana.pid
[root@linux-node1 run]# vim /etc/init.d/kibana
24 pidfile="/var/run/kibana/$name.pid"
[root@linux-node1 kibana]# mkdir /var/run/kibana -p
[root@linux-node1 kibana]# ls -ld /var/run/kibana
drwxr-xr-x 2 root root 4096 Aug  3 13:59 /var/run/kibana
[root@linux-node1 kibana]# chown -R kibana:kibana /var/run/kibana
[root@linux-node1 kibana]# ls -ld /var/run/kibana
drwxr-xr-x 2 kibana kibana 4096 Aug  3 14:02 /var/run/kibana
[root@linux-node1 kibana]# /etc/init.d/kibana start
kibana started
[root@linux-node1 kibana]# netstat -tnlup|grep "5601"|grep -v "grep"
tcp        0      0 192.168.100.129:5601        0.0.0.0:*                   LISTEN      2735/node
[root@linux-node1 kibana]# ps -ef|grep "kibana"|grep -v "grep"
kibana    2735     1 14 14:04 pts/1    00:00:07 /usr/share/kibana/bin/../node/bin/node --no-warnings /usr/share/kibana/bin/../src/cli -c /etc/kibana/kibana.yml
[root@linux-node1 ~]# lsof -i:5601
COMMAND  PID   USER   FD   TYPE DEVICE SIZE/OFF NODE NAME
node    2735 kibana   12u  IPv4  60240      0t0  TCP linux-node1:esmagent (LISTEN)

Access in a browser: http://192.168.100.129:5601
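
Reachability can also be checked from the shell before opening the browser; once the UI loads, index patterns such as system-* and elasticsearch-* typically need to be defined (under Management in Kibana 5.x) so the indexes created earlier become searchable. A quick check that is not part of the original transcript:

curl -I http://192.168.100.129:5601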

Collecting Nginx access logs
[root@linux-node1 ~]# yum -y install nginx
[root@linux-node1 ~]# rpm -qa nginx
nginx-1.10.2-1.el6.x86_64
[root@linux-node1 ~]# chkconfig --level 3 nginx on
[root@linux-node1 ~]# chkconfig --list nginx
nginx           0:off   1:off   2:off   3:on    4:off   5:off   6:off
[root@linux-node1 ~]# /etc/init.d/nginx start
Starting nginx:                                            [  OK  ]
[root@linux-node1 ~]# netstat -tnlup|grep "80"|grep -v "grep"
tcp        0      0 0.0.0.0:80                  0.0.0.0:*                   LISTEN      3272/nginx
tcp        0      0 :::80                       :::*                        LISTEN      3272/nginx
[root@linux-node1 ~]# ps -ef|grep "nginx"|grep -v "grep"
root      3272     1  0 13:49 ?        00:00:00 nginx: master process /usr/sbin/nginx -c /etc/nginx/nginx.conf
nginx     3273  3272  0 13:49 ?        00:00:00 nginx: worker process
[root@linux-node1 ~]# vim /etc/nginx/nginx.conf
log_format  json  '{ "@timestamp": "$time_local", '
         '"@fields": { '
         '"remote_addr": "$remote_addr", '
         '"remote_user": "$remote_user", '
         '"body_bytes_sent": "$body_bytes_sent", '
         '"request_time": "$request_time", '
         '"status": "$status", '
         '"request": "$request", '
         '"request_method": "$request_method", '
         '"http_referrer": "$http_referer", '
         '"http_x_forwarded_for": "$http_x_forwarded_for", '
         '"http_user_agent": "$http_user_agent" } }';
access_log  /var/log/nginx/access_json.log  json;
[root@linux-node1 ~]# nginx -t
nginx: the configuration file /etc/nginx/nginx.conf syntax is ok
nginx: configuration file /etc/nginx/nginx.conf test is successful
[root@linux-node1 ~]# /etc/init.d/nginx restart
Stopping nginx:                                            [  OK  ]
Starting nginx:                                            [  OK  ]
[root@linux-node1 ~]# vim /etc/logstash/conf.d/keywa.logstash.conf
input {

    file {
        path => "/var/log/messages"
        type => "system"
        start_position => "beginning"
    }

    file {
        path => "/var/log/elasticsearch/kw_es.log"
        type => "elasticsearch"
        start_position => "beginning"
        codec => multiline {
            pattern => "^\["
            negate => true
            what => "previous"
        }
    }

    file {
        path => "/var/log/nginx/access_json.log"
        type => "nginx_access"
        start_position => "beginning"
        codec => "json"
    }
}

output {

    if [type] == "system" {
        elasticsearch {
            hosts => ["192.168.100.129:9200"]
            index => "system-%{+YYYY.MM.dd}"
        }
    }

    if [type] == "elasticsearch" {
        elasticsearch {
            hosts => ["192.168.100.129:9200"]
            index => "elasticsearch-%{+YYYY.MM.dd}"
        }
    }

    if [type] == "nginx_access" {
        elasticsearch {
            hosts => ["192.168.100.129:9200"]
            index => "nginx_access-%{+YYYY.MM.dd}"
        }
    }
}
[root@linux-node1 ~]# /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/keywa.logstash.conf
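
To generate a few JSON-formatted access-log entries and confirm that the new index appears, a couple of throwaway requests are enough; a sketch that is not part of the original transcript:

curl -s http://192.168.100.129/ > /dev/null
curl -XGET 'http://192.168.100.129:9200/_cat/indices/nginx_access-*?v'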

Collecting syslog messages via Rsyslog
[root@linux-node1 ~]# rpm -aq rsyslog
rsyslog-5.8.10-8.el6.x86_64
[root@linux-node1 ~]# vim /etc/rsyslog.conf
79 *.* @@192.168.100.129:514
[root@linux-node1 ~]# /etc/init.d/rsyslog restart
Shutting down system logger:                               [  OK  ]
Starting system logger:                                    [  OK  ]
[root@linux-node1 ~]# vim /etc/logstash/conf.d/keywa.logstash.conf
input {

    file {
        path => "/var/log/messages"
        type => "system"
        start_position => "beginning"
    }

    file {
        path => "/var/log/elasticsearch/kw_es.log"
        type => "elasticsearch"
        start_position => "beginning"
        codec => multiline {
            pattern => "^\["
            negate => true
            what => "previous"
        }
    }

    file {
        path => "/var/log/nginx/access_json.log"
        type => "nginx_access"
        start_position => "beginning"
        codec => "json"
    }

    syslog {
        type => "syslog"
        host => "192.168.100.129"
        port => "514"
    }
}

output {

    if [type] == "system" {
        elasticsearch {
            hosts => ["192.168.100.129:9200"]
            index => "system-%{+YYYY.MM.dd}"
        }
    }

    if [type] == "elasticsearch" {
        elasticsearch {
            hosts => ["192.168.100.129:9200"]
            index => "elasticsearch-%{+YYYY.MM.dd}"
        }
    }

    if [type] == "nginx_access" {
        elasticsearch {
            hosts => ["192.168.100.129:9200"]
            index => "nginx_access-%{+YYYY.MM.dd}"
        }
    }

    if [type] == "syslog" {
        elasticsearch {
            hosts => ["192.168.100.129:9200"]
            index => "syslog-%{+YYYY.MM.dd}"
        }
    }
}
[root@linux-node1 ~]# /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/keywa.logstash.conf
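
With rsyslog forwarding *.* to 192.168.100.129:514 and the syslog input running, a test message can be injected with logger and the resulting index checked; a sketch that is not part of the original transcript:

logger "hello syslog from linux-node1"
curl -XGET 'http://192.168.100.129:9200/_cat/indices/syslog-*?v'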

Decoupling Logstash with a Redis broker
[root@linux-node1 ~]# yum -y install redis
[root@linux-node1 ~]# rpm -qa redis
redis-2.4.10-1.el6.x86_64
[root@linux-node1 ~]# rpm -qc redis
/etc/logrotate.d/redis
/etc/redis.conf
[root@linux-node1 ~]# vim /etc/redis.conf
17 daemonize yes
25 port 6379
30 bind 192.168.100.129
[root@linux-node1 ~]# /etc/init.d/redis start
Starting redis-server:                                     [  OK  ]
[root@linux-node1 ~]# netstat -tnlup|grep "6379"|grep -v "grep"
tcp        0      0 192.168.100.129:6379        0.0.0.0:*                   LISTEN      3402/redis-server   
[root@linux-node1 ~]# ps -ef|grep "redis"|grep -v "grep"
redis     3402     1  0 16:25 ?        00:00:00 /usr/sbin/redis-server /etc/redis.conf
[root@linux-node1 ~]# lsof -i:6379
COMMAND    PID  USER   FD   TYPE DEVICE SIZE/OFF NODE NAME
redis-ser 3402 redis    4u  IPv4  69011      0t0  TCP linux-node1:6379 (LISTEN)
[root@linux-node1 ~]# vim /etc/logstash/conf.d/redis_out.conf
input {

    stdin{}
}

output {

    redis {
        host => "192.168.100.129"
        port => "6379"
        db => "6"
        data_type => "list"
        key => "demo"
    }
}
[root@linux-node1 ~]# /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/redis_out.conf
The stdin plugin is now waiting for input:
hello redis

[root@linux-node1 ~]# redis-cli -h 192.168.100.129
redis 192.168.100.129:6379> info
db6:keys=1,expires=0
redis 192.168.100.129:6379> select 6
OK
redis 192.168.100.129:6379[6]> keys *
1) "demo"
redis 192.168.100.129:6379[6]> LINDEX demo -1
"{\"@timestamp\":\"2017-08-03T08:43:25.372Z\",\"@version\":\"1\",\"host\":\"linux-node1\",\"message\":\"hello redis\"}"

[root@linux-node1 ~]# vim /etc/logstash/conf.d/redis_in.conf
input {

    redis {
        host => "192.168.100.129"
        port => "6379"
        db => "6"
        data_type => "list"
        key => "demo"
    }
}

output {

    elasticsearch {
        hosts => ["192.168.100.129:9200"]
        index => "redis-%{+YYYY.MM.dd}"
    }
}
[root@linux-node1 ~]# /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/redis_in.conf
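
While the indexer defined in redis_in.conf drains events into Elasticsearch, the length of the broker queue can be watched to make sure it is not growing without bound; a sketch that is not part of the original transcript (assuming the installed redis-cli supports the -n database switch, otherwise run SELECT 6 and LLEN demo interactively as shown above):

redis-cli -h 192.168.100.129 -n 6 llen demo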
============================================================================================

