separate ELK into its own project

Adrien Audebert 2024-09-13 15:14:21 +02:00
parent fc34fd2174
commit f06bb700ae
5 changed files with 234 additions and 171 deletions

docker-compose-elk.yml (new file, 182 lines added)

@@ -0,0 +1,182 @@
services:
setup:
image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION}
container_name: setup
user: "0"
volumes:
- certs:/usr/share/elasticsearch/config/certs
command: >
bash -c '
if [ x${ELASTIC_PASSWORD} == x ]; then
echo "Set the ELASTIC_PASSWORD environment variable in the .env file";
exit 1;
elif [ x${KIBANA_PASSWORD} == x ]; then
echo "Set the KIBANA_PASSWORD environment variable in the .env file";
exit 1;
fi;
if [ ! -f config/certs/ca.zip ]; then
echo "Creating CA";
bin/elasticsearch-certutil ca --silent --pem -out config/certs/ca.zip;
unzip config/certs/ca.zip -d config/certs;
fi;
if [ ! -f config/certs/certs.zip ]; then
echo "Creating certs";
echo -ne \
"instances:\n"\
" - name: es01\n"\
" dns:\n"\
" - es01\n"\
" - localhost\n"\
" ip:\n"\
" - 127.0.0.1\n"\
" - name: kibana\n"\
" dns:\n"\
" - kibana\n"\
" - localhost\n"\
" ip:\n"\
" - 127.0.0.1\n"\
> config/certs/instances.yml;
bin/elasticsearch-certutil cert --silent --pem -out config/certs/certs.zip --in config/certs/instances.yml --ca-cert config/certs/ca/ca.crt --ca-key config/certs/ca/ca.key;
unzip config/certs/certs.zip -d config/certs;
fi;
echo "Setting file permissions"
chown -R root:root config/certs;
find . -type d -exec chmod 750 \{\} \;;
find . -type f -exec chmod 640 \{\} \;;
echo "Waiting for Elasticsearch availability";
until curl -s --cacert config/certs/ca/ca.crt https://es01:9200 | grep -q "missing authentication credentials"; do sleep 30; done;
echo "Setting kibana_system password";
until curl -s -X POST --cacert config/certs/ca/ca.crt -u "elastic:${ELASTIC_PASSWORD}" -H "Content-Type: application/json" https://es01:9200/_security/user/kibana_system/_password -d "{\"password\":\"${KIBANA_PASSWORD}\"}" | grep -q "^{}"; do sleep 10; done;
echo "All done!";
'
healthcheck:
test: ["CMD-SHELL", "[ -f config/certs/es01/es01.crt ]"]
interval: 1s
timeout: 5s
retries: 120
networks:
- app-network
es01:
image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION}
container_name: es01
depends_on:
setup:
condition: service_healthy
volumes:
- certs:/usr/share/elasticsearch/config/certs:ro
- pong_es_data_01:/usr/share/elasticsearch/data
labels:
co.elastic.logs/module: elasticsearch
ports:
- 9200:9200
environment:
- node.name=es01
- cluster.name=${CLUSTER_NAME}
- discovery.type=single-node
- ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
- bootstrap.memory_lock=true
- xpack.security.enabled=true
- xpack.security.http.ssl.enabled=true
- xpack.security.http.ssl.key=certs/es01/es01.key
- xpack.security.http.ssl.certificate=certs/es01/es01.crt
- xpack.security.http.ssl.certificate_authorities=certs/ca/ca.crt
- xpack.security.transport.ssl.enabled=true
- xpack.security.transport.ssl.key=certs/es01/es01.key
- xpack.security.transport.ssl.certificate=certs/es01/es01.crt
- xpack.security.transport.ssl.certificate_authorities=certs/ca/ca.crt
- xpack.security.transport.ssl.verification_mode=certificate
- xpack.license.self_generated.type=${LICENSE}
healthcheck:
test:
[
"CMD-SHELL",
"curl -s --cacert config/certs/ca/ca.crt https://localhost:9200 | grep -q 'missing authentication credentials'",
]
interval: 10s
timeout: 10s
retries: 120
networks:
- app-network
kibana:
image: docker.elastic.co/kibana/kibana:${STACK_VERSION}
container_name: kibana
labels:
co.elastic.logs/module: kibana
depends_on:
es01:
condition: service_healthy
volumes:
- certs:/usr/share/kibana/config/certs:ro
- pong_kibana:/usr/share/kibana/data
ports:
- 5601:5601
environment:
- SERVERNAME=kibana
- ELASTICSEARCH_HOSTS=https://es01:9200
- ELASTICSEARCH_USERNAME=${KIBANA_USERNAME}
- ELASTICSEARCH_PASSWORD=${KIBANA_PASSWORD}
- ELASTICSEARCH_SSL_CERTIFICATEAUTHORITIES=config/certs/ca/ca.crt
- XPACK_SECURITY_ENCRYPTIONKEY=${ENCRYPTION_KEY}
- XPACK_ENCRYPTEDSAVEDOBJECTS_ENCRYPTIONKEY=${ENCRYPTION_KEY}
- XPACK_REPORTING_ENCRYPTIONKEY=${ENCRYPTION_KEY}
healthcheck:
test:
[
"CMD-SHELL",
"curl -s -I http://localhost:5601 | grep -q 'HTTP/1.1 302 Found'"
]
interval: 10s
timeout: 10s
retries: 120
networks:
- app-network
logstash01:
image: docker.elastic.co/logstash/logstash:${STACK_VERSION}
container_name: logstash01
labels:
co.elastic.logs/module: logstash
user: root
depends_on:
es01:
condition: service_healthy
kibana:
condition: service_healthy
volumes:
- certs:/usr/share/logstash/certs
- pong_logstash_data01:/usr/share/logstash/data
- ./config/logstash.conf:/usr/share/logstash/pipeline/logstash.conf:ro
- pong_django_logs:/usr/share/logstash/logs
ports:
- "5044:5044/udp"
command: logstash -f /usr/share/logstash/pipeline/logstash.conf
environment:
- NODE_NAME="logstash"
- ELASTIC_HOSTS=https://es01:9200
- ELASTIC_USER=${ELASTIC_USERNAME}
- ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
- xpack.monitoring.enabled=false
networks:
- app-network
volumes:
pong_django_logs:
pong_es_data_01:
driver: local
pong_kibana:
driver: local
pong_logstash_data01:
driver: local
certs:
driver: local
networks:
app-network:
name: app-network
external: true
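
The stack above reads its settings from a .env file next to docker-compose-elk.yml and expects the external app-network to exist before it starts. A minimal sketch of both, with placeholder values (only the variable names come from the compose file; every value shown here is an assumption to adapt):

    # .env -- placeholder values, adjust before use
    STACK_VERSION=8.14.1
    CLUSTER_NAME=pong-cluster
    LICENSE=basic
    ELASTIC_USERNAME=elastic
    ELASTIC_PASSWORD=change_me_elastic
    KIBANA_USERNAME=kibana_system
    KIBANA_PASSWORD=change_me_kibana
    ENCRYPTION_KEY=a_random_string_of_at_least_32_characters

    # app-network is declared external, so create it once before the first run:
    docker network create app-network

kibana_system is the built-in user Kibana normally authenticates with, and the setup service above sets its password to KIBANA_PASSWORD, so pointing KIBANA_USERNAME at it keeps the two in sync.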

docker-compose.yml

@@ -1,63 +1,4 @@
services:
setup:
image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION}
container_name: setup
user: "0"
volumes:
- certs:/usr/share/elasticsearch/config/certs
command: >
bash -c '
if [ x${ELASTIC_PASSWORD} == x ]; then
echo "Set the ELASTIC_PASSWORD environment variable in the .env file";
exit 1;
elif [ x${KIBANA_PASSWORD} == x ]; then
echo "Set the KIBANA_PASSWORD environment variable in the .env file";
exit 1;
fi;
if [ ! -f config/certs/ca.zip ]; then
echo "Creating CA";
bin/elasticsearch-certutil ca --silent --pem -out config/certs/ca.zip;
unzip config/certs/ca.zip -d config/certs;
fi;
if [ ! -f config/certs/certs.zip ]; then
echo "Creating certs";
echo -ne \
"instances:\n"\
" - name: es01\n"\
" dns:\n"\
" - es01\n"\
" - localhost\n"\
" ip:\n"\
" - 127.0.0.1\n"\
" - name: kibana\n"\
" dns:\n"\
" - kibana\n"\
" - localhost\n"\
" ip:\n"\
" - 127.0.0.1\n"\
> config/certs/instances.yml;
bin/elasticsearch-certutil cert --silent --pem -out config/certs/certs.zip --in config/certs/instances.yml --ca-cert config/certs/ca/ca.crt --ca-key config/certs/ca/ca.key;
unzip config/certs/certs.zip -d config/certs;
fi;
echo "Setting file permissions"
chown -R root:root config/certs;
find . -type d -exec chmod 750 \{\} \;;
find . -type f -exec chmod 640 \{\} \;;
echo "Waiting for Elasticsearch availability";
until curl -s --cacert config/certs/ca/ca.crt https://es01:9200 | grep -q "missing authentication credentials"; do sleep 30; done;
echo "Setting kibana_system password";
until curl -s -X POST --cacert config/certs/ca/ca.crt -u "elastic:${ELASTIC_PASSWORD}" -H "Content-Type: application/json" https://es01:9200/_security/user/kibana_system/_password -d "{\"password\":\"${KIBANA_PASSWORD}\"}" | grep -q "^{}"; do sleep 10; done;
echo "All done!";
'
healthcheck:
test: ["CMD-SHELL", "[ -f config/certs/es01/es01.crt ]"]
interval: 1s
timeout: 5s
retries: 120
backend:
build:
context: .
@@ -115,105 +56,7 @@ services:
interval: 10s
timeout: 5s
retries: 5
es01:
image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION}
container_name: es01
depends_on:
setup:
condition: service_healthy
volumes:
- certs:/usr/share/elasticsearch/config/certs:ro
- pong_es_data_01:/usr/share/elasticsearch/data
labels:
co.elastic.logs/module: elasticsearch
ports:
- 9200:9200
environment:
- node.name=es01
- cluster.name=${CLUSTER_NAME}
- discovery.type=single-node
- ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
- bootstrap.memory_lock=true
- xpack.security.enabled=true
- xpack.security.http.ssl.enabled=true
- xpack.security.http.ssl.key=certs/es01/es01.key
- xpack.security.http.ssl.certificate=certs/es01/es01.crt
- xpack.security.http.ssl.certificate_authorities=certs/ca/ca.crt
- xpack.security.transport.ssl.enabled=true
- xpack.security.transport.ssl.key=certs/es01/es01.key
- xpack.security.transport.ssl.certificate=certs/es01/es01.crt
- xpack.security.transport.ssl.certificate_authorities=certs/ca/ca.crt
- xpack.security.transport.ssl.verification_mode=certificate
- xpack.license.self_generated.type=${LICENSE}
healthcheck:
test:
[
"CMD-SHELL",
"curl -s --cacert config/certs/ca/ca.crt https://localhost:9200 | grep -q 'missing authentication credentials'",
]
interval: 10s
timeout: 10s
retries: 120
kibana:
image: docker.elastic.co/kibana/kibana:${STACK_VERSION}
container_name: kibana
labels:
co.elastic.logs/module: kibana
depends_on:
es01:
condition: service_healthy
volumes:
- certs:/usr/share/kibana/config/certs:ro
- pong_kibana:/usr/share/kibana/data
ports:
- 5601:5601
environment:
- SERVERNAME=kibana
- ELASTICSEARCH_HOSTS=https://es01:9200
- ELASTICSEARCH_USERNAME=${KIBANA_USERNAME}
- ELASTICSEARCH_PASSWORD=${KIBANA_PASSWORD}
- ELASTICSEARCH_SSL_CERTIFICATEAUTHORITIES=config/certs/ca/ca.crt
- XPACK_SECURITY_ENCRYPTIONKEY=${ENCRYPTION_KEY}
- XPACK_ENCRYPTEDSAVEDOBJECTS_ENCRYPTIONKEY=${ENCRYPTION_KEY}
- XPACK_REPORTING_ENCRYPTIONKEY=${ENCRYPTION_KEY}
healthcheck:
test:
[
"CMD-SHELL",
"curl -s -I http://localhost:5601 | grep -q 'HTTP/1.1 302 Found'"
]
interval: 10s
timeout: 10s
retries: 120
logstash01:
image: docker.elastic.co/logstash/logstash:${STACK_VERSION}
container_name: logstash01
labels:
co.elastic.logs/module: logstash
user: root
depends_on:
es01:
condition: service_healthy
kibana:
condition: service_healthy
volumes:
- certs:/usr/share/logstash/certs
- pong_logstash_data01:/usr/share/logstash/data
- ./config/logstash.conf:/usr/share/logstash/pipeline/logstash.conf:ro
- pong_django_logs:/usr/share/logstash/logs
ports:
- "5044:5044/udp"
command: logstash -f /usr/share/logstash/pipeline/logstash.conf
environment:
- NODE_NAME="logstash"
- ELASTIC_HOSTS=https://es01:9200
- ELASTIC_USER=${ELASTIC_USERNAME}
- ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
- xpack.monitoring.enabled=false
volumes:
pong:
driver: local
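
With the ELK services removed here, the main stack and the ELK stack only meet on the shared external app-network, so hostnames such as es01 keep resolving across the two compose projects. A quick sanity check once both are up, assuming curl is available inside the backend image:

    docker network inspect app-network --format '{{range .Containers}}{{.Name}} {{end}}'
    docker compose -f docker-compose.yml -p main_project exec backend curl -sk https://es01:9200
    # an authentication error from es01 still proves the host is reachable over the shared network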

Makefile

@@ -1,26 +1,38 @@
MAIN_PROJECT_NAME=main_project
ELK_PROJECT_NAME=elk_project
COMPOSE_FILE=docker-compose.yml
COMPOSE=docker compose -f $(COMPOSE_FILE)
ELK_COMPOSE_FILE=docker-compose-elk.yml
COMPOSE=docker compose -f $(COMPOSE_FILE) -p $(MAIN_PROJECT_NAME)
ELK_COMPOSE=docker compose -f $(ELK_COMPOSE_FILE) -p $(ELK_PROJECT_NAME)
CONTAINER=$(c)
up: down
up:
$(COMPOSE) build
$(COMPOSE) up -d $(CONTAINER) || true
build:
$(COMPOSE) build $(CONTAINER)
start:
$(COMPOSE) start $(CONTAINER)
stop:
$(COMPOSE) stop $(CONTAINER)
down:
$(COMPOSE) down $(CONTAINER)
destroy:
$(COMPOSE) down -v --rmi all
# Manage ELK stack
elk-up:
$(ELK_COMPOSE) up -d --remove-orphans || true
elk-down:
$(ELK_COMPOSE) down --remove-orphans
elk-destroy:
$(ELK_COMPOSE) down --remove-orphans -v --rmi all
kill-pid:
sudo lsof -i :5432 | awk 'NR>1 {print $$2}' | xargs sudo kill -9 || true
sudo lsof -i :5601 | awk 'NR>1 {print $$2}' | xargs sudo kill -9 || true
@@ -34,8 +46,6 @@ ps:
db-shell:
$(COMPOSE) exec db psql -U 42student players_db
re: destroy up
help:
@echo "Usage:"
@echo " make build [c=service] # Build images"
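
Day-to-day usage with the two compose projects defined above:

    make up           # build and start the main project (main_project)
    make elk-up       # start the ELK stack as its own compose project (elk_project)
    make elk-down     # stop the ELK stack, leaving the main project running
    make elk-destroy  # remove the ELK containers, volumes and images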


@@ -12,8 +12,8 @@ def handle_game_data(p1, p2, s_p1, s_p2, bt_p1, bt_2, dur, is_tournoi, name_tour
create_match(player_1, player_2, s_p1, s_p2, bt_p1, bt_2, dur, is_tournoi, name_tournament)
update_player_statistics(p1)
update_player_statistics(p2)
#update_player_statistics(p1)
#update_player_statistics(p2)
except Exception as e:
print(f"Error in endfortheouche: {e}")
@@ -65,6 +65,7 @@ def create_player(
m_duration=m_duration,
num_participated_tournaments=num_participated_tournaments,
num_won_tournaments=num_won_tournaments
)
player.save()
return player
@@ -128,7 +129,7 @@ def update_player_statistics(player_name):
total_score = matches_as_player1.aggregate(Sum('score_player1'))['score_player1__sum'] or 0
total_score += matches_as_player2.aggregate(Sum('score_player2'))['score_player2__sum'] or 0
total_score_adv = matches_as_player1.aggregate(Sum('score_player2'))['score_player2__sum'] or 0
total_score_adv += matches_as_player2.aggregate(Sum('score_player1'))['score_player1__sum'] or 0

settings.py

@@ -136,6 +136,32 @@ CHANNEL_LAYERS = {
},
}
LOGGING = {
'version': 1, # Django requires this key
'disable_existing_loggers': False, # Keep Django's default loggers
'formatters': {
'simple': {
'format': '{levelname} {message}',
'style': '{', # Use Python's str.format()-style placeholders
},
},
'handlers': {
'console': { # Log to the console
'level': 'DEBUG', # Minimum level of messages that should be handled
'class': 'logging.StreamHandler',
'formatter': 'simple', # Use the simple formatter defined above
},
},
'loggers': {
'django': { # The main logger for Django itself
'handlers': ['console'],
'level': 'DEBUG', # Minimum log level to be logged
'propagate': False, # Prevents log propagation to other loggers
},
},
}
"""
LOGGING = {
'version': 1, # The version of the logging configuration schema
'disable_existing_loggers': False, # Allows existing loggers to keep logging
@@ -170,3 +196,4 @@ LOGGING = {
},
},
}
"""