From b3b2bdf18cc1aade24d4ad98998c7d293375e77e Mon Sep 17 00:00:00 2001
From: Adrien Audebert
Date: Mon, 5 Aug 2024 13:49:25 +0200
Subject: [PATCH] update

---
 .env                |  6 +-----
 docker-compose.yaml | 50 ++++++++++++++++++++++-----------------------
 logstash.conf       | 44 +++++++++++++++++++++++++--------------
 makefile            |  6 ++----
 4 files changed, 54 insertions(+), 52 deletions(-)

diff --git a/.env b/.env
index cab57c9..afb0168 100644
--- a/.env
+++ b/.env
@@ -12,18 +12,14 @@ DB_HOST=db
 DB_PORT=5432
 
 PROJECT_PATH=${PWD}/pong
-POSTGRES_DATA_PATH=${PWD}/data/db
-ES_DATA_PATH=${PWD}/data/es
-KIBA_DATA_PATH=${PWD}/data/kiba
-LSTASH_DATA_PATH=${PWD}/data/lstash
 
 # ElasticSearch settings
 STACK_VERSION=8.14.3
 CLUSTER_NAME=docker-cluster
 LICENSE=basic
+ELASTIC_USERNAME=adrien
 ELASTIC_PASSWORD=qwerty42
-ES_PORT=9200
 
 # Kibana settings
 KIBANA_PASSWORD=qwerty42
diff --git a/docker-compose.yaml b/docker-compose.yaml
index ddc7844..d35c128 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -40,13 +40,19 @@ services:
       POSTGRES_DB: ${POSTGRES_DB}
       POSTGRES_USER: ${POSTGRES_USER}
       POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
+    healthcheck:
+      test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}"]
+      interval: 10s
+      timeout: 5s
+      retries: 5
 
   es01:
     image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION}
+    container_name: es01
     volumes:
       - pong_logstash_data_01:/usr/share/elasticsearch/data
     ports:
-      - ${ES_PORT}:9200
+      - "127.0.0.1:9200:9200"
     networks:
       - app-network
     environment:
@@ -54,6 +60,8 @@ services:
       - cluster.name=${CLUSTER_NAME}
       - discovery.type=single-node
      - ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
+      - xpack.security.enabled=false
+      - xpack.license.self_generated.type=trial
     mem_limit: ${ES_MEM_LIMIT}
     ulimits:
       memlock:
@@ -65,34 +73,36 @@ services:
       - es01
       - kibana
     image: docker.elastic.co/logstash/logstash:${STACK_VERSION}
-    container_name: logstash
+    container_name: logstash01
     volumes:
-      - pong_logstash_data_01:/usr/share/logstash/data/logstash.conf
+      - ./logstash.conf:/usr/share/logstash/pipeline/logstash.conf
     ports:
       - "5044:5044"
     networks:
       - app-network
     environment:
-      - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
-      - ELASTICSEARCH_USERNAME=kibana_system
-      - ELASTICSEARCH_PASSWORD=${KIBANA_PASSWORD}
+      - ELASTIC_HOSTS=http://es01:9200
+      - ELASTIC_USER=${ELASTIC_USERNAME}
+      - ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
+      - xpack.monitoring.enabled=false
 
   kibana:
     image: docker.elastic.co/kibana/kibana:${STACK_VERSION}
+    depends_on:
+      - es01
     container_name: kibana
     volumes:
       - pong_kibana:/usr/share/kibana/data
-      - pong_logstash_data_01:/usr/share/logstash/data
-    user: root
+    user: "1000:1000"
     ports:
       - 5601:5601
     networks:
       - app-network
     environment:
-      - SERVERNAME=kibana
-      - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
-      - ELASTICSEARCH_USERNAME=kibana_system
-      - ELASTICSEARCH_PASSWORD=${KIBANA_PASSWORD}
+      - SERVERNAME=pong.kibana.org
+      - ELASTICSEARCH_HOSTS=http://es01:9200
+      - ELASTICSEARCH_USERNAME=${ELASTIC_USERNAME}
+      - ELASTICSEARCH_PASSWORD=${ELASTIC_PASSWORD}
     mem_limit: ${KB_MEM_LIMIT}
     #healthcheck:
       #test: ["CMD-SHELL", "curl -s -I http://localhost:5601 | grep -q 'HTTP/1.1 302 Found'"]
@@ -109,28 +119,12 @@ volumes:
       o: bind
   pong_pg_data:
     driver: local
-    driver_opts:
-      type: none
-      device: ${POSTGRES_DATA_PATH}
-      o: bind
   pong_es_data_01:
     driver: local
-    driver_opts:
-      type: none
-      device: ${ES_DATA_PATH}
-      o: bind
   pong_kibana:
     driver: local
-    driver_opts:
-      type: none
-      device: ${KIBA_DATA_PATH}
-      o: bind
   pong_logstash_data_01:
     driver: local
-    driver_opts:
-      type: none
-      device: ${LSTASH_DATA_PATH}
-      o: bind
 
 networks:
   app-network:
diff --git a/logstash.conf b/logstash.conf
index beca20b..6ee59a8 100644
--- a/logstash.conf
+++ b/logstash.conf
@@ -1,26 +1,40 @@
 input {
-  stdin { }
-}
-
-filter {
-  grok {
-    match => {
-      "message" => '%{IP:client_ip} - - \[%{HTTPDATE:timestamp}\] "%{WORD:http_method} %{URIPATH:request_path}" %{NUMBER:http_status_code} %{NUMBER:response_size}'
-    }
-    # Optional: add a tag to the event for easier identification
-    add_tag => ["parsed_log"]
+  # Input from Docker container's stdout
+  beats {
+    port => 5044
+  }
 }
 
-  # Optionally, convert the timestamp to the Logstash @timestamp
+filter {
+  # Grok filter to parse the log lines
+  grok {
+    match => {
+      "message" => [
+        "%{DATA:container_name}\s*\|\s*%{IP:client_ip}:%{NUMBER:client_port} - - \[%{HTTPDATE:timestamp}\] \"%{WORD:method} %{URIPATHPARAM:request} HTTP/%{NUMBER:http_version}\" %{NUMBER:status} %{NUMBER:bytes}"
+      ]
+    }
+  }
+
+  # Date filter to convert timestamp to a proper date format
   date {
-    match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss Z" ]
+    match => ["timestamp", "dd/MMM/yyyy:HH:mm:ss Z"]
     target => "@timestamp"
+    remove_field => ["timestamp"]
   }
 }
 
 output {
-  elasticsearch {
-    hosts => ["http://es01:9200"]
-    index => "logstash-%{+YYYY.MM.dd}"
+  # Output to stdout for testing purposes
+  stdout {
+    codec => rubydebug
   }
+
+  # Optionally, output to Elasticsearch
+  elasticsearch {
+    hosts => ["http://es01:9200"]
+    index => "docker-logs-%{+YYYY.MM.dd}"
+    user => "${ELASTIC_USER}"
+    password => "${ELASTIC_PASSWORD}"
+  }
 }
diff --git a/makefile b/makefile
index 9b3edb8..754f563 100644
--- a/makefile
+++ b/makefile
@@ -3,7 +3,6 @@ COMPOSE=docker compose -f $(COMPOSE_FILE)
 CONTAINER=$(c)
 
 up:
-	sudo mkdir -p $$PWD/data/db
 	$(COMPOSE) build
 	$(COMPOSE) up $(CONTAINER)
 
@@ -21,9 +20,8 @@ down:
 
 destroy:
 	$(COMPOSE) down -v --rmi all
-	#sudo rm -rf $$PWD/data/db
-	#sudo lsof -i :5432 | awk 'NR>1 {print $$2}' | xargs sudo kill -9 || true
-	#sudo lsof -i :80 | awk 'NR>1 {print $$2}' | xargs sudo kill -9 || true
+	sudo lsof -i :5432 | awk 'NR>1 {print $$2}' | xargs sudo kill -9 || true
+	sudo lsof -i :80 | awk 'NR>1 {print $$2}' | xargs sudo kill -9 || true
 
 logs:
 	$(COMPOSE) logs -f $(CONTAINER)
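
For reference, the new grok pattern targets compose-prefixed access-log lines. A hypothetical input event such as the following (container name, address, and path are invented for illustration):

    pong-1 | 172.18.0.5:43210 - - [05/Aug/2024:13:49:25 +0200] "GET /pong/ HTTP/1.1" 200 1234

would parse to roughly container_name=pong-1, client_ip=172.18.0.5, client_port=43210, method=GET, request=/pong/, http_version=1.1, status=200, bytes=1234; the date filter then copies 05/Aug/2024:13:49:25 +0200 into @timestamp and removes the temporary timestamp field.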
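
The pipeline now listens for Beats traffic on 5044, but the patch itself adds no shipper for container stdout. A minimal sketch of what could feed it, assuming Filebeat is added as a sidecar; the service name, image, mount paths, and the logstash service name in depends_on are assumptions, not part of this patch:

    # filebeat.yml (hypothetical sketch): forward Docker json-file logs to Logstash
    filebeat.inputs:
      - type: container
        paths:
          - /var/lib/docker/containers/*/*.log

    output.logstash:
      hosts: ["logstash01:5044"]

    # docker-compose.yaml (hypothetical service)
      filebeat:
        image: docker.elastic.co/beats/filebeat:${STACK_VERSION}
        user: root   # needs read access to the Docker log directories
        volumes:
          - ./filebeat.yml:/usr/share/filebeat/filebeat.yml:ro
          - /var/lib/docker/containers:/var/lib/docker/containers:ro
        networks:
          - app-network
        depends_on:
          - logstash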
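
Note that the bare depends_on added for kibana only waits for es01 to start, not to answer requests. A sketch of an es01 readiness probe mirroring the db healthcheck above; the intervals are guesses, and the unauthenticated curl only works because the patch sets xpack.security.enabled=false:

    # es01 (hypothetical sketch): readiness probe
        healthcheck:
          test: ["CMD-SHELL", "curl -s http://localhost:9200 >/dev/null || exit 1"]
          interval: 10s
          timeout: 5s
          retries: 5

With that in place, kibana and the logstash service could switch to the long depends_on form with condition: service_healthy instead of the list form.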