[ELK] Setting Up the ELK Environment

seongcheollee · January 18, 2024

I plan to use Elasticsearch for search in my Spring Boot application.
Logstash is used to move the existing data in MySQL over to Elasticsearch, and Kibana is installed to serve as a management UI.

I have used Elasticsearch before, but at the time, due to the project schedule, I loaded the data into it by hand. This time the data already lives in a MySQL RDB and has to be kept in sync, and I also want to make up for not getting to do this properly last time, so I am setting up a full ELK environment.

All components are pinned to version 7.17.13.
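
The version is passed in through the ELASTIC_VERSION build argument used by the Dockerfiles below. A minimal sketch of the .env file docker-compose would read it from, assuming the docker-elk layout this setup is based on (the password values are placeholders, not the real ones):

.env
ELASTIC_VERSION=7.17.13
# placeholder credentials referenced by docker-compose.yml
ELASTIC_PASSWORD=changeme
LOGSTASH_INTERNAL_PASSWORD=changeme
KIBANA_SYSTEM_PASSWORD=changeme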

Elasticsearch Dockerfile

elasticsearch/Dockerfile
ARG ELASTIC_VERSION
FROM docker.elastic.co/elasticsearch/elasticsearch:${ELASTIC_VERSION}
elasticsearch/config/elasticsearch.yml
cluster.name: docker-cluster
network.bind_host: 0.0.0.0
network.host: 0.0.0.0
discovery.type: single-node
xpack.security.enabled: false
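
The following quick check is my own addition, not part of the files above: once the elasticsearch container is running, it should answer on port 9200 without credentials, since xpack.security.enabled is false.

# cluster info as JSON, no auth prompt expected
curl http://localhost:9200
# single-node health; yellow or green is fine here
curl http://localhost:9200/_cluster/health?pretty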

Logstash Dockerfile

logstash/Dockerfile
ARG ELASTIC_VERSION

FROM docker.elastic.co/logstash/logstash:${ELASTIC_VERSION}
logstash/config/logstash.yml
http.host: '0.0.0.0'
node.name: logstash

monitoring.elasticsearch.hosts: ['http://elasticsearch:9200']
xpack.monitoring.enabled: false
xpack.monitoring.elasticsearch.hosts: ['elasticsearch:9200']
log.level: debug
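
As a quick liveness check of my own (not in the original config), the Logstash node API answers on port 9600, which the compose file below publishes:

# basic node info; confirms Logstash itself is up before debugging the pipeline
curl http://localhost:9600/?pretty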
logstash/pipeline/logstash.conf
input {
    jdbc {
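        # Polls the users table every second ("/1 * * * * *" is rufus-scheduler cron
        # with a leading seconds field). tracking_column/use_column_value persist the
        # last seen id in last_run_metadata_path, and clean_run => true resets that
        # state on startup; since the statement re-reads the whole table, the
        # document_id in the output below makes each run an upsert, not a duplicate.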
        jdbc_validate_connection => true
        clean_run => true
        jdbc_connection_string => "jdbc:mysql://mysql:3306/beatbuddy"
        jdbc_driver_library => "/usr/share/logstash/mysql-connector-java-5/mysql-connector-java-5.1.49.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        jdbc_user => "root"
        jdbc_password => "1234"
        tracking_column => "id"
        statement => "SELECT * FROM users"
        tracking_column_type => "numeric"
        sql_log_level => "debug"  
        jdbc_validation_timeout => 120  
        jdbc_paging_enabled => true
        use_column_value => true
        schedule => "/1 * * * * *"
        last_run_metadata_path => "/usr/share/logstash/.logstash_jdbc_last_run"
        type => "users"
    }
  
    jdbc {
        jdbc_validate_connection => true
        clean_run => true
        jdbc_connection_string => "jdbc:mysql://mysql:3306/beatbuddy"
        jdbc_driver_library => "/usr/share/logstash/mysql-connector-java-5/mysql-connector-java-5.1.49.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        jdbc_user => "root"
        jdbc_password => "1234"
        tracking_column => "id"
        statement => "SELECT * FROM feed"
        type => "feed"
        tracking_column_type => "numeric"
        sql_log_level => "debug"  
        jdbc_validation_timeout => 120  
        jdbc_paging_enabled => true
        use_column_value => true
        schedule => "/1 * * * * *"
        last_run_metadata_path => "/usr/share/logstash/.logstash_jdbc_last_run"
    }
}

filter {
    if [type] == "users" {
        mutate {
            add_field => {
                "index" => "bb_users"
            }
        }
    } else if [type] == "feed" {
        mutate {
            add_field => {
                "index" => "bb_feed"
            }
        }
    }
}

output {
    elasticsearch {
        hosts => ["${ELASTICSEARCH_HOST}:${ELASTICSEARCH_PORT}"]
        user => "logstash_system"
        password => "${LOGSTASH_INTERNAL_PASSWORD}"
        index => "%{[index]}"
        document_id => "%{id}"
    }
    stdout { codec => rubydebug }
}
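
A quick way I check that the two pipelines actually delivered documents (again my own sanity check, not part of the config):

# bb_users and bb_feed should show up with non-zero doc counts
curl http://localhost:9200/_cat/indices?v
# peek at a few synced rows
curl "http://localhost:9200/bb_users/_search?size=3&pretty"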

While wiring Logstash up to JDBC, I ran into an error where the mysql-connector could not be found. The connector jar was mounted correctly and the directory was clearly being resolved, yet the connection still failed.
After a lot of trial and error, switching MySQL from version 8 to 5.7 fixed it.
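
For reference, if you want to stay on MySQL 8 instead of downgrading, my understanding is that the 5.1.x connector's com.mysql.jdbc.Driver class is the likely mismatch; Connector/J 8.x ships com.mysql.cj.jdbc.Driver instead, so the JDBC settings would look roughly like this (the jar path is hypothetical and I have not verified this combination myself):

jdbc_driver_library => "/usr/share/logstash/mysql-connector-java-8/mysql-connector-java-8.0.28.jar"
jdbc_driver_class => "com.mysql.cj.jdbc.Driver"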

As for the indices, I originally wanted to keep several kinds of documents in a single index distinguished by a type attribute, but mapping types are deprecated, so instead the index name is set dynamically and each data type gets its own index.

Kibana Dockerfile

kibana/Dockerfile
ARG ELASTIC_VERSION

FROM docker.elastic.co/kibana/kibana:${ELASTIC_VERSION}
kibana/config/kibana.yml
server.name: kibana
server.host: 0.0.0.0
elasticsearch.hosts: [http://elasticsearch:9200]

monitoring.ui.container.elasticsearch.enabled: true
# monitoring.ui.container.logstash.enabled: true

## X-Pack security credentials
#
#elasticsearch.username: kibana_system
#elasticsearch.password: ${KIBANA_SYSTEM_PASSWORD}
elasticsearch.username: 
elasticsearch.password: 
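
Once the kibana container is up it is reachable at http://localhost:5601; the status API call below is just my own quick check that it has connected to Elasticsearch:

# overall state should report as available once Kibana can reach Elasticsearch
curl http://localhost:5601/api/status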

docker-compose.yml [ ELK + MySQL ]

version: '3.7'

services:
  setup:
    profiles:
      - setup
    build:
      context: setup/
      args:
        ELASTIC_VERSION: ${ELASTIC_VERSION}
    init: true
    volumes:
      - ./setup/entrypoint.sh:/entrypoint.sh:ro,Z
      - ./setup/lib.sh:/lib.sh:ro,Z
      - ./setup/roles:/roles:ro,Z
    environment:
      ELASTIC_PASSWORD: ${ELASTIC_PASSWORD:-}
      LOGSTASH_INTERNAL_PASSWORD: ${LOGSTASH_INTERNAL_PASSWORD:-}
      KIBANA_SYSTEM_PASSWORD: ${KIBANA_SYSTEM_PASSWORD:-}
      METRICBEAT_INTERNAL_PASSWORD: ${METRICBEAT_INTERNAL_PASSWORD:-}
      FILEBEAT_INTERNAL_PASSWORD: ${FILEBEAT_INTERNAL_PASSWORD:-}
      HEARTBEAT_INTERNAL_PASSWORD: ${HEARTBEAT_INTERNAL_PASSWORD:-}
      MONITORING_INTERNAL_PASSWORD: ${MONITORING_INTERNAL_PASSWORD:-}
      BEATS_SYSTEM_PASSWORD: ${BEATS_SYSTEM_PASSWORD:-}
    networks:
      - ubuntu_my_network
    depends_on:
      - elasticsearch

  elasticsearch:
    container_name: elasticsearch
    build:
      context: elasticsearch/
      args:
        ELASTIC_VERSION: ${ELASTIC_VERSION}
    volumes:
      - ./elasticsearch/config/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml:ro,Z
      - elasticsearch:/usr/share/elasticsearch/data:Z
    ports:
      - 9200:9200
      - 9300:9300
    environment:
      node.name: elasticsearch
      ES_JAVA_OPTS: -Xms512m -Xmx512m
      ELASTIC_PASSWORD: ${ELASTIC_PASSWORD:-}
      discovery.type: single-node
    networks:
      - ubuntu_my_network
    restart: unless-stopped

  logstash:
    build:
      context: logstash/
      args:
        ELASTIC_VERSION: ${ELASTIC_VERSION}
    volumes:
      - /Users/seongcheollee/Downloads/docker-elk/logstash/mysql-connector-java-5:/usr/share/logstash/mysql-connector-java-5
      - ./logstash/config/logstash.yml:/usr/share/logstash/config/logstash.yml
      - ./logstash/pipeline:/usr/share/logstash/pipeline
    ports:
      - 5044:5044
      - 50000:50000/tcp
      - 50000:50000/udp
      - 9600:9600
    environment:
      LS_JAVA_OPTS: -Xms256m -Xmx256m
      LOGSTASH_INTERNAL_PASSWORD: bb123
      ELASTICSEARCH_HOST: elasticsearch
      ELASTICSEARCH_PORT: 9200
    networks:
      - ubuntu_my_network
    depends_on:
      - elasticsearch
    restart: unless-stopped

  kibana:
    build:
      context: kibana/
      args:
        ELASTIC_VERSION: ${ELASTIC_VERSION}
    volumes:
      - ./kibana/config/kibana.yml:/usr/share/kibana/config/kibana.yml:ro,Z
    ports:
      - 5601:5601
    environment:
      KIBANA_SYSTEM_PASSWORD: ${KIBANA_SYSTEM_PASSWORD:-}
    networks:
      - ubuntu_my_network
    depends_on:
      - elasticsearch
    restart: unless-stopped

  mysql:
    image: mysql:5.7.8
    container_name: mysql
    ports:
      - '3307:3306'
    environment:
      MYSQL_ROOT_PASSWORD: 
      MYSQL_DATABASE: 
    # the mysql image takes charset options as server flags, not environment variables
    command: --character-set-server=utf8 --collation-server=utf8_general_ci
    networks:
      - ubuntu_my_network
networks:
  ubuntu_my_network:
    driver: bridge

volumes:
  elasticsearch:
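
Roughly how I bring the whole stack up; since security is disabled here the docker-elk setup profile is optional, and the commands assume the Compose v1 CLI:

docker-compose build
docker-compose up -d
# follow the Logstash logs to confirm the JDBC pipelines fire every second
docker-compose logs -f logstash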

Reference
https://github.com/deviantony/docker-elk
