Adrien Audebert 2024-08-05 13:49:25 +02:00
parent 5f22737bf2
commit b3b2bdf18c
4 changed files with 54 additions and 52 deletions

.env

@@ -12,18 +12,14 @@ DB_HOST=db
DB_PORT=5432
PROJECT_PATH=${PWD}/pong
POSTGRES_DATA_PATH=${PWD}/data/db
ES_DATA_PATH=${PWD}/data/es
KIBA_DATA_PATH=${PWD}/data/kiba
LSTASH_DATA_PATH=${PWD}/data/lstash
# ElasticSearch settings
STACK_VERSION=8.14.3
CLUSTER_NAME=docker-cluster
LICENSE=basic
ELASTIC_USERNAME=adrien
ELASTIC_PASSWORD=qwerty42
ES_PORT=9200
# Kibana settings
KIBANA_PASSWORD=qwerty42
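
Since everything below consumes these variables through Compose's ${...} interpolation, a quick way to sanity-check the file after removals like this is to render the merged configuration without starting anything. A minimal sketch, assuming the compose file and .env sit in the same directory:

# Print the compose file with every ${VAR} substituted from .env;
# a variable that was removed above but is still referenced will
# surface as a blank value plus a "variable is not set" warning.
docker compose config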

docker-compose.yml

@@ -40,13 +40,19 @@ services:
POSTGRES_DB: ${POSTGRES_DB}
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
healthcheck:
test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}"]
interval: 10s
timeout: 5s
retries: 5
es01:
image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION}
container_name: es01
volumes:
- pong_logstash_data_01:/usr/share/elasticsearch/data
ports:
- ${ES_PORT}:9200
- "127.0.0.1:9200:9200"
networks:
- app-network
environment:
@@ -54,6 +60,8 @@ services:
- cluster.name=${CLUSTER_NAME}
- discovery.type=single-node
- ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
- xpack.security.enabled=false
- xpack.license.self_generated.type=trial
mem_limit: ${ES_MEM_LIMIT}
ulimits:
memlock:
@@ -65,34 +73,36 @@ services:
- es01
- kibana
image: docker.elastic.co/logstash/logstash:${STACK_VERSION}
container_name: logstash
container_name: logstash01
volumes:
- pong_logstash_data_01:/usr/share/logstash/data/logstash.conf
- ./logstash.conf:/usr/share/logstash/pipeline/logstash.conf
ports:
- "5044:5044"
networks:
- app-network
environment:
- ELASTICSEARCH_HOSTS=http://elasticsearch:9200
- ELASTICSEARCH_USERNAME=kibana_system
- ELASTICSEARCH_PASSWORD=${KIBANA_PASSWORD}
- ELASTIC_HOSTS=http://es01:9200
- ELASTIC_USER=${ELASTIC_USERNAME}
- ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
- xpack.monitoring.enabled=false
kibana:
image: docker.elastic.co/kibana/kibana:${STACK_VERSION}
depends_on:
- es01
container_name: kibana
volumes:
- pong_kibana:/usr/share/kibana/data
- pong_logstash_data_01:/usr/share/logstash/data
user: root
user: "1000:1000"
ports:
- 5601:5601
networks:
- app-network
environment:
- SERVERNAME=kibana
- ELASTICSEARCH_HOSTS=http://elasticsearch:9200
- ELASTICSEARCH_USERNAME=kibana_system
- ELASTICSEARCH_PASSWORD=${KIBANA_PASSWORD}
- SERVERNAME=pong.kibana.org
- ELASTICSEARCH_HOSTS=http://es01:9200
- ELASTICSEARCH_USERNAME=${ELASTIC_USERNAME}
- ELASTICSEARCH_PASSWORD=${ELASTIC_PASSWORD}
mem_limit: ${KB_MEM_LIMIT}
#healthcheck:
#test: ["CMD-SHELL", "curl -s -I http://localhost:5601 | grep -q 'HTTP/1.1 302 Found'"]
@@ -109,28 +119,12 @@ volumes:
o: bind
pong_pg_data:
driver: local
driver_opts:
type: none
device: ${POSTGRES_DATA_PATH}
o: bind
pong_es_data_01:
driver: local
driver_opts:
type: none
device: ${ES_DATA_PATH}
o: bind
pong_kibana:
driver: local
driver_opts:
type: none
device: ${KIBA_DATA_PATH}
o: bind
pong_logstash_data_01:
driver: local
driver_opts:
type: none
device: ${LSTASH_DATA_PATH}
o: bind
networks:
app-network:
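
With xpack.security.enabled=false and the 9200 binding now restricted to 127.0.0.1, a rough smoke test of the stack from the host could look like the following. This is a sketch under assumptions: the Postgres service is the db that DB_HOST points at, and the services have had time to start.

# Postgres: the pg_isready healthcheck drives the (healthy) status shown here
docker compose ps db

# Elasticsearch: answers unauthenticated on the loopback-only port
curl -s http://127.0.0.1:9200

# Kibana: published on 5601 on all interfaces
curl -s -I http://127.0.0.1:5601 | head -n 1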

logstash.conf

@@ -1,26 +1,40 @@
input {
stdin { }
}
# Input from Docker container's stdout
beats {
port => 5044
}
}
filter {
# Grok filter to parse the log lines
grok {
match => {
"message" => '%{IP:client_ip} - - \[%{HTTPDATE:timestamp}\] "%{WORD:http_method} %{URIPATH:request_path}" %{NUMBER:http_status_code} %{NUMBER:response_size}'
"message" => [
"%{DATA:container_name}\s*\|\s*%{IP:client_ip}:%{NUMBER:client_port} - - \[%{HTTPDATE:timestamp}\] \"%{WORD:method} %{URIPATHPARAM:request} HTTP/%{NUMBER:http_version}\" %{NUMBER:status} %{NUMBER:bytes}"
]
}
# Optional: add a tag to the event for easier identification
add_tag => ["parsed_log"]
}
# Optionally, convert the timestamp to the Logstash @timestamp
# Date filter to convert timestamp to a proper date format
date {
match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss Z" ]
match => ["timestamp", "dd/MMM/yyyy:HH:mm:ss Z"]
target => "@timestamp"
remove_field => ["timestamp"]
}
}
output {
elasticsearch {
hosts => ["http://es01:9200"]
index => "logstash-%{+YYYY.MM.dd}"
# Output to stdout for testing purposes
stdout {
codec => rubydebug
}
# Optionally, output to Elasticsearch
elasticsearch {
hosts => ["localhost:9200"]
index => "docker-logs-%{+YYYY.MM.dd}"
user=> "${ELASTIC_USER}"
password=> "${ELASTIC_PASSWORD}"
}
}
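
For illustration, the new grok pattern expects Compose-prefixed access-log lines. The sample below is made up, and the config-test command assumes the compose service is simply named logstash (only the container_name logstash01 is visible above):

# A made-up line in the shape the pattern expects; it would yield
# container_name, client_ip, client_port, method, request, http_version,
# status and bytes, with @timestamp rewritten by the date filter:
#   pong_web | 172.18.0.4:51234 - - [05/Aug/2024:11:49:25 +0200] "GET /api/pong HTTP/1.1" 200 1234

# Syntax-check the mounted pipeline without starting it:
docker compose exec logstash bin/logstash --config.test_and_exit \
  -f /usr/share/logstash/pipeline/logstash.conf --path.data /tmp/logstash-test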

Makefile

@@ -3,7 +3,6 @@ COMPOSE=docker compose -f $(COMPOSE_FILE)
CONTAINER=$(c)
up:
sudo mkdir -p $$PWD/data/db
$(COMPOSE) build
$(COMPOSE) up $(CONTAINER)
@@ -21,9 +20,8 @@ down:
destroy:
$(COMPOSE) down -v --rmi all
#sudo rm -rf $$PWD/data/db
#sudo lsof -i :5432 | awk 'NR>1 {print $$2}' | xargs sudo kill -9 || true
#sudo lsof -i :80 | awk 'NR>1 {print $$2}' | xargs sudo kill -9 || true
sudo lsof -i :5432 | awk 'NR>1 {print $$2}' | xargs sudo kill -9 || true
sudo lsof -i :80 | awk 'NR>1 {print $$2}' | xargs sudo kill -9 || true
logs:
$(COMPOSE) logs -f $(CONTAINER)
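
For reference, these targets are typically driven with the optional c= argument, which feeds the CONTAINER variable; with no argument, up builds and starts the whole stack. A usage sketch:

make up              # build images, then start every service
make up c=es01       # build, then start only the es01 service
make logs c=es01     # follow one service's logs
make destroy         # remove containers, volumes and images, then free ports 5432 and 80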