Thursday, April 7, 2016

Installing Elasticsearch, Logstash, and Kibana

https://www.digitalocean.com/community/tutorials/how-to-install-elasticsearch-1-7-logstash-1-5-and-kibana-4-1-elk-stack-on-centos-7

Logstash: Server component that processes incoming logs
Elasticsearch: Stores all of the logs
Kibana: Web interface for searching and visualizing logs, which will be proxied through Nginx


#***********************Install Java 8
Download jre-8u74-linux-x64.rpm from:
https://mirror.its.sfu.ca/mirror/CentOS-Third-Party/NSG/common/x86_64/
cd /root
yum localinstall jre-8u74-linux-x64.rpm
ls -la /usr/bin/jav*
ls -la /etc/alternatives/java
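#Optionally confirm that the expected runtime is the one being picked up:
java -version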

#************************Install Elasticsearch
cd /root
ls

yum localinstall elasticsearch-2.3.1.rpm

#Elasticsearch is now installed. Let's edit the configuration:
sudo vi /etc/elasticsearch/elasticsearch.yml
#You will want to restrict outside access to your Elasticsearch instance (port 9200)
network.host: localhost

#Now start Elasticsearch:
sudo systemctl start elasticsearch
sudo systemctl enable elasticsearch
sudo systemctl status elasticsearch
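#A quick sanity check: Elasticsearch should answer on the loopback interface with a JSON banner.
curl -X GET http://localhost:9200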

##########################Install Kibana

cd /root
yum localinstall kibana-4.5.0-1.x86_64.rpm
sudo vi /opt/kibana/config/kibana.yml
server.host: "localhost"
#end vi
sudo systemctl start kibana
sudo systemctl status kibana
sudo systemctl enable kibana
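#Kibana now listens on port 5601, but only on the loopback interface, so check it from the server itself:
curl -I http://localhost:5601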

############################Install Nginx
Because we configured Kibana to listen on localhost, we must set up a reverse proxy to allow external access to it. We will use Nginx for this purpose.
#Add the EPEL repository to yum
sudo yum -y install epel-release
sudo yum -y install nginx httpd-tools


sudo htpasswd -c /etc/nginx/htpasswd.users kibanaadmin
password: kibanaadmin
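#The file should now contain a single line with the user name and a password hash:
cat /etc/nginx/htpasswd.users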



sudo vi /etc/nginx/nginx.conf
#check that the following line is present:
 include /etc/nginx/conf.d/*.conf;
#end vi

#Now we will create an Nginx server block in a new file:

sudo vi /etc/nginx/conf.d/kibana.conf
server {
    listen 80;

    server_name example.com;    # replace example.com with your server's FQDN or IP

    auth_basic "Restricted Access";
    auth_basic_user_file /etc/nginx/htpasswd.users;

    location / {
        proxy_pass http://localhost:5601;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection 'upgrade';
        proxy_set_header Host $host;
        proxy_cache_bypass $http_upgrade;      
    }
}
#end vi
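#Before starting Nginx, verify that the configuration parses cleanly:
sudo nginx -t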
sudo systemctl stop httpd.service
sudo systemctl start nginx
sudo systemctl enable nginx

#Note: If SELinux is enabled, Nginx needs permission to connect to Kibana; the following boolean allows that:
sudo setsebool -P httpd_can_network_connect 1
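#If firewalld is running (common on a default CentOS 7 install; adjust if you use a different firewall), open HTTP as well:
sudo firewall-cmd --permanent --add-service=http
sudo firewall-cmd --reload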


##################################Install Logstash
cd /root
yum localinstall logstash-2.3.0-1.noarch.rpm


##################################Configure Logstash
sudo vi /etc/logstash/conf.d/01-lumberjack-input.conf
input {
  lumberjack {
    port => 5043
    type => "logs"
    ssl_certificate => "/etc/pki/tls/certs/logstash-forwarder.crt"
    ssl_key => "/etc/pki/tls/private/logstash-forwarder.key"
  }
}
#end vi
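#The input above references a certificate and key that do not exist yet. A minimal sketch of creating a
#self-signed pair with openssl follows; the CN is a placeholder and should match the hostname or IP that
#the logstash-forwarder clients will use to reach this server.
cd /etc/pki/tls
sudo openssl req -x509 -batch -nodes -days 3650 -newkey rsa:2048 \
  -subj "/CN=your_elk_server_fqdn" \
  -keyout private/logstash-forwarder.key -out certs/logstash-forwarder.crt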

sudo vi /etc/logstash/conf.d/10-syslog.conf
filter {
  if [type] == "syslog" {
    grok {
      match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      add_field => [ "received_from", "%{host}" ]
    }
    syslog_pri { }
    date {
      match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss" ]
    }
  }
}
#end vi
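#As an illustration (the sample line below is made up), a syslog entry such as
#  Apr  7 12:34:56 elk-server sshd[1234]: Accepted password for root from 10.0.0.5
#would be parsed into syslog_hostname=elk-server, syslog_program=sshd, syslog_pid=1234 and
#syslog_message="Accepted password for root from 10.0.0.5", with received_at and received_from
#added by the filter and the event timestamp taken from syslog_timestamp by the date filter.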

sudo vi /etc/logstash/conf.d/30-lumberjack-output.conf
output {
  elasticsearch { hosts => ["localhost:9200"] }
  stdout { codec => rubydebug }
}
#This output stores the processed logs in Elasticsearch and also echoes each event to stdout in rubydebug format, which is handy while testing.
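#Logstash itself still has to be started. The paths below assume the standard layout of the 2.3 RPM;
#--configtest checks the files in conf.d before the service is (re)started.
sudo /opt/logstash/bin/logstash --configtest -f /etc/logstash/conf.d/
sudo systemctl restart logstash
sudo systemctl enable logstash
sudo systemctl status logstash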





