This commit is contained in:
Adrien Audebert 2024-08-05 13:49:25 +02:00
parent 5f22737bf2
commit b3b2bdf18c
4 changed files with 54 additions and 52 deletions

6
.env
View File

@@ -12,18 +12,14 @@ DB_HOST=db
DB_PORT=5432 DB_PORT=5432
PROJECT_PATH=${PWD}/pong PROJECT_PATH=${PWD}/pong
POSTGRES_DATA_PATH=${PWD}/data/db
ES_DATA_PATH=${PWD}/data/es
KIBA_DATA_PATH=${PWD}/data/kiba
LSTASH_DATA_PATH=${PWD}/data/lstash
# ElasticSearch settings # ElasticSearch settings
STACK_VERSION=8.14.3 STACK_VERSION=8.14.3
CLUSTER_NAME=docker-cluster CLUSTER_NAME=docker-cluster
LICENSE=basic LICENSE=basic
ELASTIC_USERNAME=adrien
ELASTIC_PASSWORD=qwerty42 ELASTIC_PASSWORD=qwerty42
ES_PORT=9200
# Kibana settings # Kibana settings
KIBANA_PASSWORD=qwerty42 KIBANA_PASSWORD=qwerty42

View File

@@ -40,13 +40,19 @@ services:
POSTGRES_DB: ${POSTGRES_DB} POSTGRES_DB: ${POSTGRES_DB}
POSTGRES_USER: ${POSTGRES_USER} POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
healthcheck:
test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}"]
interval: 10s
timeout: 5s
retries: 5
es01: es01:
image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION} image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION}
container_name: es01
volumes: volumes:
- pong_logstash_data_01:/usr/share/elasticsearch/data - pong_logstash_data_01:/usr/share/elasticsearch/data
ports: ports:
- ${ES_PORT}:9200 - "127.0.0.1:9200:9200"
networks: networks:
- app-network - app-network
environment: environment:
@@ -54,6 +60,8 @@ services:
- cluster.name=${CLUSTER_NAME} - cluster.name=${CLUSTER_NAME}
- discovery.type=single-node - discovery.type=single-node
- ELASTIC_PASSWORD=${ELASTIC_PASSWORD} - ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
- xpack.security.enabled=false
- xpack.license.self_generated.type=trial
mem_limit: ${ES_MEM_LIMIT} mem_limit: ${ES_MEM_LIMIT}
ulimits: ulimits:
memlock: memlock:
@@ -65,34 +73,36 @@ services:
- es01 - es01
- kibana - kibana
image: docker.elastic.co/logstash/logstash:${STACK_VERSION} image: docker.elastic.co/logstash/logstash:${STACK_VERSION}
container_name: logstash container_name: logstash01
volumes: volumes:
- pong_logstash_data_01:/usr/share/logstash/data/logstash.conf - ./logstash.conf:/usr/share/logstash/pipeline/logstash.conf
ports: ports:
- "5044:5044" - "5044:5044"
networks: networks:
- app-network - app-network
environment: environment:
- ELASTICSEARCH_HOSTS=http://elasticsearch:9200 - ELASTIC_HOSTS=http://es01:9200
- ELASTICSEARCH_USERNAME=kibana_system - ELASTIC_USER=${ELASTIC_USERNAME}
- ELASTICSEARCH_PASSWORD=${KIBANA_PASSWORD} - ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
- xpack.monitoring.enabled=false
kibana: kibana:
image: docker.elastic.co/kibana/kibana:${STACK_VERSION} image: docker.elastic.co/kibana/kibana:${STACK_VERSION}
depends_on:
- es01
container_name: kibana container_name: kibana
volumes: volumes:
- pong_kibana:/usr/share/kibana/data - pong_kibana:/usr/share/kibana/data
- pong_logstash_data_01:/usr/share/logstash/data user: "1000:1000"
user: root
ports: ports:
- 5601:5601 - 5601:5601
networks: networks:
- app-network - app-network
environment: environment:
- SERVERNAME=kibana - SERVERNAME=pong.kibana.org
- ELASTICSEARCH_HOSTS=http://elasticsearch:9200 - ELASTICSEARCH_HOSTS=http://es01:9200
- ELASTICSEARCH_USERNAME=kibana_system - ELASTICSEARCH_USERNAME=${ELASTIC_USERNAME}
- ELASTICSEARCH_PASSWORD=${KIBANA_PASSWORD} - ELASTICSEARCH_PASSWORD=${ELASTIC_PASSWORD}
mem_limit: ${KB_MEM_LIMIT} mem_limit: ${KB_MEM_LIMIT}
#healthcheck: #healthcheck:
#test: ["CMD-SHELL", "curl -s -I http://localhost:5601 | grep -q 'HTTP/1.1 302 Found'"] #test: ["CMD-SHELL", "curl -s -I http://localhost:5601 | grep -q 'HTTP/1.1 302 Found'"]
@@ -109,28 +119,12 @@ volumes:
o: bind o: bind
pong_pg_data: pong_pg_data:
driver: local driver: local
driver_opts:
type: none
device: ${POSTGRES_DATA_PATH}
o: bind
pong_es_data_01: pong_es_data_01:
driver: local driver: local
driver_opts:
type: none
device: ${ES_DATA_PATH}
o: bind
pong_kibana: pong_kibana:
driver: local driver: local
driver_opts:
type: none
device: ${KIBA_DATA_PATH}
o: bind
pong_logstash_data_01: pong_logstash_data_01:
driver: local driver: local
driver_opts:
type: none
device: ${LSTASH_DATA_PATH}
o: bind
networks: networks:
app-network: app-network:

View File

@@ -1,26 +1,40 @@
input { input {
stdin { } # Input from Docker container's stdout
} beats {
port => 5044
filter { }
grok {
match => {
"message" => '%{IP:client_ip} - - \[%{HTTPDATE:timestamp}\] "%{WORD:http_method} %{URIPATH:request_path}" %{NUMBER:http_status_code} %{NUMBER:response_size}'
}
# Optional: add a tag to the event for easier identification
add_tag => ["parsed_log"]
} }
# Optionally, convert the timestamp to the Logstash @timestamp filter {
# Grok filter to parse the log lines
grok {
match => {
"message" => [
"%{DATA:container_name}\s*\|\s*%{IP:client_ip}:%{NUMBER:client_port} - - \[%{HTTPDATE:timestamp}\] \"%{WORD:method} %{URIPATHPARAM:request} HTTP/%{NUMBER:http_version}\" %{NUMBER:status} %{NUMBER:bytes}"
]
}
}
# Date filter to convert timestamp to a proper date format
date { date {
match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss Z" ] match => ["timestamp", "dd/MMM/yyyy:HH:mm:ss Z"]
target => "@timestamp" target => "@timestamp"
remove_field => ["timestamp"]
} }
} }
output { output {
elasticsearch { # Output to stdout for testing purposes
hosts => ["http://es01:9200"] stdout {
index => "logstash-%{+YYYY.MM.dd}" codec => rubydebug
} }
# Optionally, output to Elasticsearch
elasticsearch {
hosts => ["localhost:9200"]
index => "docker-logs-%{+YYYY.MM.dd}"
user=> "${ELASTIC_USER}"
password=> "${ELASTIC_PASSWORD}"
}
} }

View File

@@ -3,7 +3,6 @@ COMPOSE=docker compose -f $(COMPOSE_FILE)
CONTAINER=$(c) CONTAINER=$(c)
up: up:
sudo mkdir -p $$PWD/data/db
$(COMPOSE) build $(COMPOSE) build
$(COMPOSE) up $(CONTAINER) $(COMPOSE) up $(CONTAINER)
@ -21,9 +20,8 @@ down:
destroy: destroy:
$(COMPOSE) down -v --rmi all $(COMPOSE) down -v --rmi all
#sudo rm -rf $$PWD/data/db sudo lsof -i :5432 | awk 'NR>1 {print $$2}' | xargs sudo kill -9 || true
#sudo lsof -i :5432 | awk 'NR>1 {print $$2}' | xargs sudo kill -9 || true sudo lsof -i :80 | awk 'NR>1 {print $$2}' | xargs sudo kill -9 || true
#sudo lsof -i :80 | awk 'NR>1 {print $$2}' | xargs sudo kill -9 || true
logs: logs:
$(COMPOSE) logs -f $(CONTAINER) $(COMPOSE) logs -f $(CONTAINER)