diff --git a/Dockerfile b/Dockerfile index d4278cf..b9fd487 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,6 +13,6 @@ COPY certs/ certs/ RUN python3 -m venv venv RUN venv/bin/pip3 install --upgrade pip -RUN venv/bin/pip3 install --no-cache-dir -r requirements.txt +RUN venv/bin/pip3 install --no-cache-dir -r requirements.txt -v EXPOSE 8080 diff --git a/docker-compose.yml.old b/docker-compose.yml.old new file mode 100644 index 0000000..f1ecee9 --- /dev/null +++ b/docker-compose.yml.old @@ -0,0 +1,240 @@ +services: + setup: + image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION} + container_name: setup + user: "0" + volumes: + - certs:/usr/share/elasticsearch/config/certs + command: > + bash -c ' + if [ x${ELASTIC_PASSWORD} == x ]; then + echo "Set the ELASTIC_PASSWORD environment variable in the .env file"; + exit 1; + elif [ x${KIBANA_PASSWORD} == x ]; then + echo "Set the KIBANA_PASSWORD environment variable in the .env file"; + exit 1; + fi; + if [ ! -f config/certs/ca.zip ]; then + echo "Creating CA"; + bin/elasticsearch-certutil ca --silent --pem -out config/certs/ca.zip; + unzip config/certs/ca.zip -d config/certs; + fi; + if [ ! -f config/certs/certs.zip ]; then + echo "Creating certs"; + echo -ne \ + "instances:\n"\ + " - name: es01\n"\ + " dns:\n"\ + " - es01\n"\ + " - localhost\n"\ + " ip:\n"\ + " - 127.0.0.1\n"\ + " - name: kibana\n"\ + " dns:\n"\ + " - kibana\n"\ + " - localhost\n"\ + " ip:\n"\ + " - 127.0.0.1\n"\ + > config/certs/instances.yml; + + bin/elasticsearch-certutil cert --silent --pem -out config/certs/certs.zip --in config/certs/instances.yml --ca-cert config/certs/ca/ca.crt --ca-key config/certs/ca/ca.key; + unzip config/certs/certs.zip -d config/certs; + fi; + + echo "Setting file permissions" + chown -R root:root config/certs; + find . -type d -exec chmod 750 \{\} \;; + find . 
-type f -exec chmod 640 \{\} \;; + + echo "Waiting for Elasticsearch availability"; + until curl -s --cacert config/certs/ca/ca.crt https://es01:9200 | grep -q "missing authentication credentials"; do sleep 30; done; + echo "Setting kibana_system password"; + until curl -s -X POST --cacert config/certs/ca/ca.crt -u "elastic:${ELASTIC_PASSWORD}" -H "Content-Type: application/json" https://es01:9200/_security/user/kibana_system/_password -d "{\"password\":\"${KIBANA_PASSWORD}\"}" | grep -q "^{}"; do sleep 10; done; + echo "All done!"; + ' + healthcheck: + test: ["CMD-SHELL", "[ -f config/certs/es01/es01.crt ]"] + interval: 1s + timeout: 5s + retries: 120 + + backend: + build: + context: . + dockerfile: Dockerfile + image: backend + container_name: backend + restart: always + command: /bin/sh -c "sleep 5 && + venv/bin/python manage.py makemigrations --noinput && + venv/bin/python manage.py migrate --noinput && + venv/bin/python manage.py collectstatic --noinput && + venv/bin/daphne -e ssl:8080:privateKey=./certs/ssl/backend-key.pem:certKey=./certs/ssl/backend-cert.pem pong.asgi:application" + volumes: + - pong:/transcendence/pong + - pong_django_logs:/transcendence/logs + ports: + - 8080:8080 + networks: + - app-network + environment: + DB_HOST: db + DB_PORT: 5432 + DB_NAME: ${POSTGRES_DB} + DB_USER: ${POSTGRES_USER} + DB_PASSWORD: ${POSTGRES_PASSWORD} + SECURE_SSL_REDIRECT: True + SECURE_HSTS_SECONDS: 31536000 + SECURE_HSTS_INCLUDE_SUBDOMAINS: True + SECURE_HSTS_PRELOAD: True + depends_on: + - db + healthcheck: + test: ["CMD-SHELL", "curl -fk https://localhost:8080 || exit 1"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 10s + + db: + image: postgres:latest + container_name: postgres + restart: always + volumes: + - pong_pg_data:/var/lib/postgresql/data + ports: + - "5432:5432" + networks: + - app-network + environment: + POSTGRES_DB: ${POSTGRES_DB} + POSTGRES_USER: ${POSTGRES_USER} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} + healthcheck: + test: ["CMD-SHELL",
"pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}"] + interval: 10s + timeout: 5s + retries: 5 + + es01: + image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION} + container_name: es01 + depends_on: + setup: + condition: service_healthy + volumes: + - certs:/usr/share/elasticsearch/config/certs:ro + - pong_es_data_01:/usr/share/elasticsearch/data + labels: + co.elastic.logs/module: elasticsearch + ports: + - 9200:9200 + environment: + - node.name=es01 + - cluster.name=${CLUSTER_NAME} + - discovery.type=single-node + - ELASTIC_PASSWORD=${ELASTIC_PASSWORD} + - bootstrap.memory_lock=true + - xpack.security.enabled=true + - xpack.security.http.ssl.enabled=true + - xpack.security.http.ssl.key=certs/es01/es01.key + - xpack.security.http.ssl.certificate=certs/es01/es01.crt + - xpack.security.http.ssl.certificate_authorities=certs/ca/ca.crt + - xpack.security.transport.ssl.enabled=true + - xpack.security.transport.ssl.key=certs/es01/es01.key + - xpack.security.transport.ssl.certificate=certs/es01/es01.crt + - xpack.security.transport.ssl.certificate_authorities=certs/ca/ca.crt + - xpack.security.transport.ssl.verification_mode=certificate + - xpack.license.self_generated.type=${LICENSE} + healthcheck: + test: + [ + "CMD-SHELL", + "curl -s --cacert config/certs/ca/ca.crt https://localhost:9200 | grep -q 'missing authentication credentials'", + ] + interval: 10s + timeout: 10s + retries: 120 + + kibana: + image: docker.elastic.co/kibana/kibana:${STACK_VERSION} + container_name: kibana + labels: + co.elastic.logs/module: kibana + depends_on: + es01: + condition: service_healthy + volumes: + - certs:/usr/share/kibana/config/certs:ro + - pong_kibana:/usr/share/kibana/data + ports: + - 5601:5601 + environment: + - SERVERNAME=kibana + - ELASTICSEARCH_HOSTS=https://es01:9200 + - ELASTICSEARCH_USERNAME=${KIBANA_USERNAME} + - ELASTICSEARCH_PASSWORD=${KIBANA_PASSWORD} + - ELASTICSEARCH_SSL_CERTIFICATEAUTHORITIES=config/certs/ca/ca.crt + - 
XPACK_SECURITY_ENCRYPTIONKEY=${ENCRYPTION_KEY} + - XPACK_ENCRYPTEDSAVEDOBJECTS_ENCRYPTIONKEY=${ENCRYPTION_KEY} + - XPACK_REPORTING_ENCRYPTIONKEY=${ENCRYPTION_KEY} + healthcheck: + test: + [ + "CMD-SHELL", + "curl -s -I http://localhost:5601 | grep -q 'HTTP/1.1 302 Found'" + ] + interval: 10s + timeout: 10s + retries: 120 + + logstash01: + image: docker.elastic.co/logstash/logstash:${STACK_VERSION} + container_name: logstash01 + labels: + co.elastic.logs/module: logstash + user: root + depends_on: + es01: + condition: service_healthy + kibana: + condition: service_healthy + volumes: + - certs:/usr/share/logstash/certs + - pong_logstash_data01:/usr/share/logstash/data + - ./config/logstash.conf:/usr/share/logstash/pipeline/logstash.conf:ro + - pong_django_logs:/usr/share/logstash/logs + ports: + - "5044:5044/udp" + command: logstash -f /usr/share/logstash/pipeline/logstash.conf + environment: + - NODE_NAME="logstash" + - ELASTIC_HOSTS=https://es01:9200 + - ELASTIC_USER=${ELASTIC_USERNAME} + - ELASTIC_PASSWORD=${ELASTIC_PASSWORD} + - xpack.monitoring.enabled=false + +volumes: + pong: + driver: local + driver_opts: + type: none + device: ${PROJECT_PATH} + o: bind + pong_django_logs: + driver: local + pong_pg_data: + driver: local + pong_es_data_01: + driver: local + pong_kibana: + driver: local + pong_logstash_data01: + driver: local + certs: + driver: local + +networks: + app-network: + name: app-network + driver: bridge diff --git a/makefile b/makefile index 899ef5a..77b7a38 100644 --- a/makefile +++ b/makefile @@ -3,7 +3,7 @@ COMPOSE=docker compose -f $(COMPOSE_FILE) CONTAINER=$(c) up: down - $(COMPOSE) build + $(COMPOSE) build $(COMPOSE) up -d $(CONTAINER) || true build: diff --git a/pong/game/game.py b/pong/game/game.py index 42bfda6..cb982cb 100644 --- a/pong/game/game.py +++ b/pong/game/game.py @@ -10,7 +10,7 @@ from .models import Tournoi class Game: # Global variable to handle the using of the database - USING_DB = False + #USING_DB = False def __init__(self, 
game_id, player1, player2, localgame): self.game_id = game_id @@ -46,6 +46,7 @@ class Game: } self.speed = 1 self.game_loop_task = None + self.database = None self.ended = False self.p1_mov = 0 self.p2_mov = 0 @@ -236,7 +237,8 @@ class Game: }) if not self.botgame: if not self.localgame: - await remaining_player.send(message) + await remaining_player.send(message) + # Notify both players that the game has ended end_message = json.dumps({ 'type': 'game_ended', @@ -246,17 +248,37 @@ class Game: if not self.botgame: if not self.localgame: await self.player2.send(end_message) - while (Game.USING_DB): - await asyncio.sleep(1) - Game.USING_DB = True + + #while (Game.USING_DB): + # await asyncio.sleep(1) + #Game.USING_DB = True if hasattr(self, 'tournament'): - print(f"*** Game #{self.game_id} from tournament: {self.tournament.tournoi_reg.name} ENDED ***") - await sync_to_async(handle_game_data)(self.game_state['player1_name'], self.game_state['player2_name'], - self.game_state['player1_score'], self.game_state['player2_score'], - self.bt1, self.bt2, duration, True, self.tournament.tournoi_reg) - print(f"*** Game #{self.game_id} from tournament: {self.tournament.tournoi_reg.name} is REGISTERED ***") + print(f"*** Game #{self.game_id} from tournament: {self.tournament.tournoi_reg.name} ENDED ***") + + # Create the async task + self.database_task = asyncio.create_task( + sync_to_async(handle_game_data)( + self.game_state['player1_name'], self.game_state['player2_name'], + self.game_state['player1_score'], self.game_state['player2_score'], + self.bt1, self.bt2, duration, True, self.tournament.tournoi_reg + ) + ) + # Optionally wait for the task to complete if necessary + #await self.database + + '''await sync_to_async(handle_game_data)(self.game_state['player1_name'], self.game_state['player2_name'], + self.game_state['player1_score'], self.game_state['player2_score'], + self.bt1, self.bt2, duration, True, self.tournament.tournoi_reg)''' + print(f"*** Game #{self.game_id} 
from tournament: {self.tournament.tournoi_reg.name} is REGISTERED ***") else: - await sync_to_async(handle_game_data)(self.game_state['player1_name'], self.game_state['player2_name'], - self.game_state['player1_score'], self.game_state['player2_score'], - self.bt1, self.bt2, duration, False, None) - Game.USING_DB = False + self.database_task = asyncio.create_task( + sync_to_async(handle_game_data)( + self.game_state['player1_name'], self.game_state['player2_name'], + self.game_state['player1_score'], self.game_state['player2_score'], + self.bt1, self.bt2, duration, False, None + ) + ) + '''await sync_to_async(handle_game_data)(self.game_state['player1_name'], self.game_state['player2_name'], + self.game_state['player1_score'], self.game_state['player2_score'], + self.bt1, self.bt2, duration, False, None)''' + #Game.USING_DB = False diff --git a/pong/game/utils.py b/pong/game/utils.py index 0f193d1..b690785 100644 --- a/pong/game/utils.py +++ b/pong/game/utils.py @@ -1,3 +1,5 @@ +# /pong/game/utils.py + from .models import Player, Tournoi, Match from django.core.exceptions import ValidationError from django.shortcuts import get_object_or_404 diff --git a/pong/settings.py b/pong/settings.py index 446ec08..f419053 100644 --- a/pong/settings.py +++ b/pong/settings.py @@ -29,14 +29,15 @@ ALLOWED_HOSTS = ['*'] # Application definition INSTALLED_APPS = [ - 'django.contrib.admin', + #'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.messages', + #'django.contrib.sessions', + #'django.contrib.messages', 'django.contrib.staticfiles', 'channels', 'pong.game', + #'django_db_conn_pool', 'rest_framework' ] @@ -46,7 +47,7 @@ MIDDLEWARE = [ 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', + #'django.contrib.messages.middleware.MessageMiddleware', 
'django.middleware.clickjacking.XFrameOptionsMiddleware', ] @@ -81,6 +82,7 @@ DATABASES = { 'PASSWORD': os.getenv('DB_PASSWORD'), 'HOST': os.getenv('DB_HOST'), 'PORT': '5432', + 'CONN_MAX_AGE': None, } } @@ -136,7 +138,7 @@ CHANNEL_LAYERS = { }, } -'''LOGGING = { +LOGGING = { 'version': 1, # The version of the logging configuration schema 'disable_existing_loggers': False, # Allows existing loggers to keep logging 'formatters': { # Defines how log messages will be formatted @@ -169,4 +171,4 @@ CHANNEL_LAYERS = { 'propagate': True, # If True, messages will be passed to the parent loggers as well }, }, -}''' +}