diff --git a/docker-compose.yml b/docker-compose.yml index aa21870..33a4a7e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,4 +1,63 @@ services: + setup: + image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION} + container_name: setup + user: "0" + volumes: + - certs:/usr/share/elasticsearch/config/certs + command: > + bash -c ' + if [ x${ELASTIC_PASSWORD} == x ]; then + echo "Set the ELASTIC_PASSWORD environment variable in the .env file"; + exit 1; + elif [ x${KIBANA_PASSWORD} == x ]; then + echo "Set the KIBANA_PASSWORD environment variable in the .env file"; + exit 1; + fi; + if [ ! -f config/certs/ca.zip ]; then + echo "Creating CA"; + bin/elasticsearch-certutil ca --silent --pem -out config/certs/ca.zip; + unzip config/certs/ca.zip -d config/certs; + fi; + if [ ! -f config/certs/certs.zip ]; then + echo "Creating certs"; + echo -ne \ + "instances:\n"\ + " - name: es01\n"\ + " dns:\n"\ + " - es01\n"\ + " - localhost\n"\ + " ip:\n"\ + " - 127.0.0.1\n"\ + " - name: kibana\n"\ + " dns:\n"\ + " - kibana\n"\ + " - localhost\n"\ + " ip:\n"\ + " - 127.0.0.1\n"\ + > config/certs/instances.yml; + + bin/elasticsearch-certutil cert --silent --pem -out config/certs/certs.zip --in config/certs/instances.yml --ca-cert config/certs/ca/ca.crt --ca-key config/certs/ca/ca.key; + unzip config/certs/certs.zip -d config/certs; + fi; + + echo "Setting file permissions"; + chown -R root:root config/certs; + find . -type d -exec chmod 750 \{\} \;; + find . 
-type f -exec chmod 640 \{\} \;; + + echo "Waiting for Elasticsearch availability"; + until curl -s --cacert config/certs/ca/ca.crt https://es01:9200 | grep -q "missing authentication credentials"; do sleep 30; done; + echo "Setting kibana_system password"; + until curl -s -X POST --cacert config/certs/ca/ca.crt -u "elastic:${ELASTIC_PASSWORD}" -H "Content-Type: application/json" https://es01:9200/_security/user/kibana_system/_password -d "{\"password\":\"${KIBANA_PASSWORD}\"}" | grep -q "^{}"; do sleep 10; done; + echo "All done!"; + ' + healthcheck: + test: ["CMD-SHELL", "[ -f config/certs/es01/es01.crt ]"] + interval: 1s + timeout: 5s + retries: 120 + backend: build: context: . @@ -53,6 +112,104 @@ services: timeout: 5s retries: 5 + es01: + image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION} + container_name: es01 + depends_on: + setup: + condition: service_healthy + volumes: + - certs:/usr/share/elasticsearch/config/certs:ro + - pong_es_data_01:/usr/share/elasticsearch/data + labels: + co.elastic.logs/module: elasticsearch + ports: + - 9200:9200 + environment: + - node.name=es01 + - cluster.name=${CLUSTER_NAME} + - discovery.type=single-node + - ELASTIC_PASSWORD=${ELASTIC_PASSWORD} + - bootstrap.memory_lock=true + - xpack.security.enabled=true + - xpack.security.http.ssl.enabled=true + - xpack.security.http.ssl.key=certs/es01/es01.key + - xpack.security.http.ssl.certificate=certs/es01/es01.crt + - xpack.security.http.ssl.certificate_authorities=certs/ca/ca.crt + - xpack.security.transport.ssl.enabled=true + - xpack.security.transport.ssl.key=certs/es01/es01.key + - xpack.security.transport.ssl.certificate=certs/es01/es01.crt + - xpack.security.transport.ssl.certificate_authorities=certs/ca/ca.crt + - xpack.security.transport.ssl.verification_mode=certificate + - xpack.license.self_generated.type=${LICENSE} + healthcheck: + test: + [ + "CMD-SHELL", + "curl -s --cacert config/certs/ca/ca.crt https://localhost:9200 | grep -q 'missing 
authentication credentials'", + ] + interval: 10s + timeout: 10s + retries: 120 + + kibana: + image: docker.elastic.co/kibana/kibana:${STACK_VERSION} + container_name: kibana + labels: + co.elastic.logs/module: kibana + depends_on: + es01: + condition: service_healthy + volumes: + - certs:/usr/share/kibana/config/certs:ro + - pong_kibana:/usr/share/kibana/data + ports: + - "5601:5601" + environment: + - SERVERNAME=kibana + - ELASTICSEARCH_HOSTS=https://es01:9200 + - ELASTICSEARCH_USERNAME=${KIBANA_USERNAME} + - ELASTICSEARCH_PASSWORD=${KIBANA_PASSWORD} + - ELASTICSEARCH_SSL_CERTIFICATEAUTHORITIES=config/certs/ca/ca.crt + - XPACK_SECURITY_ENCRYPTIONKEY=${ENCRYPTION_KEY} + - XPACK_ENCRYPTEDSAVEDOBJECTS_ENCRYPTIONKEY=${ENCRYPTION_KEY} + - XPACK_REPORTING_ENCRYPTIONKEY=${ENCRYPTION_KEY} + healthcheck: + test: + [ + "CMD-SHELL", + "curl -s -I http://localhost:5601 | grep -q 'HTTP/1.1 302 Found'" + ] + interval: 10s + timeout: 10s + retries: 120 + + logstash01: + image: docker.elastic.co/logstash/logstash:${STACK_VERSION} + container_name: logstash01 + labels: + co.elastic.logs/module: logstash + user: root + depends_on: + es01: + condition: service_healthy + kibana: + condition: service_healthy + volumes: + - certs:/usr/share/logstash/certs + - pong_logstash_data01:/usr/share/logstash/data + - ./config/logstash.conf:/usr/share/logstash/pipeline/logstash.conf:ro + - pong_django_logs:/usr/share/logstash/logs + ports: + - "5044:5044/udp" + command: logstash -f /usr/share/logstash/pipeline/logstash.conf + environment: + - NODE_NAME=logstash + - ELASTIC_HOSTS=https://es01:9200 + - ELASTIC_USER=${ELASTIC_USERNAME} + - ELASTIC_PASSWORD=${ELASTIC_PASSWORD} + - xpack.monitoring.enabled=false + volumes: pong: driver: local @@ -68,6 +225,14 @@ volumes: o: bind pong_pg_data: driver: local + pong_es_data_01: + driver: local + pong_kibana: + driver: local + pong_logstash_data01: + driver: local + certs: + driver: local networks: app-network: diff --git a/docker-compose.yml_old 
b/docker-compose.yml_old new file mode 100644 index 0000000..aa21870 --- /dev/null +++ b/docker-compose.yml_old @@ -0,0 +1,75 @@ +services: + backend: + build: + context: . + dockerfile: Dockerfile + image: backend + container_name: backend + restart: always + command: /bin/sh -c "sleep 5 && + venv/bin/python manage.py makemigrations --noinput && + venv/bin/python manage.py migrate --noinput && + venv/bin/python manage.py collectstatic --noinput && + venv/bin/daphne -b 0.0.0.0 -p 8080 pong.asgi:application" + volumes: + - pong:/transcendence/pong + - pong_django_logs:/transcendence/logs + ports: + - 8080:8080 + networks: + - app-network + environment: + DB_HOST: db + DB_PORT: 5432 + DB_NAME: ${POSTGRES_DB} + DB_USER: ${POSTGRES_USER} + DB_PASSWORD: ${POSTGRES_PASSWORD} + depends_on: + - db + healthcheck: + test: ["CMD-SHELL", "curl", "http://localhost:8080"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 10s + + db: + image: postgres:latest + container_name: postgres + restart: always + volumes: + - pong_pg_data:/var/lib/postgresql/data + ports: + - "5432:5432" + networks: + - app-network + environment: + POSTGRES_DB: ${POSTGRES_DB} + POSTGRES_USER: ${POSTGRES_USER} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} + healthcheck: + test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}"] + interval: 10s + timeout: 5s + retries: 5 + +volumes: + pong: + driver: local + driver_opts: + type: none + device: ${PROJECT_PATH} + o: bind + pong_django_logs: + driver: local + driver_opts: + type: none + device: ${DJANGO_LOGS} + o: bind + pong_pg_data: + driver: local + +networks: + app-network: + name: app-network + driver: bridge diff --git a/pong/game/templates/pong/tournament_brackets.html b/pong/game/templates/pong/tournament_brackets.html index f7f0999..ca65553 100644 --- a/pong/game/templates/pong/tournament_brackets.html +++ b/pong/game/templates/pong/tournament_brackets.html @@ -41,14 +41,20 @@
+ {% for round in tournament_rounds %}
+ {% for match in round %}
+ {{ match.player1 }}
+ {{ match.player2|default:"BYE" }}
+ {% endfor %}
+ {% endfor %}
\ No newline at end of file diff --git a/pong/game/tournament.py b/pong/game/tournament.py index 4c45336..4416135 100644 --- a/pong/game/tournament.py +++ b/pong/game/tournament.py @@ -13,12 +13,6 @@ class TournamentMatch(Game): super().__init__(game_id, player1, player2, False) # Store the current game instance in active games match_maker.active_games[game_id] = self - # Set the game for the players - '''player1.set_game(self) - print(f"{player1.user.username} set to game #{self}") - if player2: - player2.set_game(self) - print(f"{player2.user.username} set to game #{self}")''' # Store the tournament instance self.tournament = tournament @@ -36,12 +30,15 @@ class TournamentMatchMaker: self.rounds = [] self.current_round = 0 self.games = 0 - self.tournament_state = "waiting" # Can be "waiting", "in_progress", or "ended" + self.tournament_state = "waiting" #Can be "waiting", "in_progress", or "ended" async def add_player(self, player): if self.tournament_state == "waiting" and player not in self.waiting_players: self.waiting_players.append(player) - print(f"User {player.user.username} joins the TOURNAMENT WAITING ROOM") + if player: + print(f"User {player.user.username} joins the TOURNAMENT WAITING ROOM") + else: + print("BOT joins the TOURNAMENT WAITING ROOM") await self.update_waiting_room() async def update_waiting_room(self): @@ -54,7 +51,7 @@ class TournamentMatchMaker: def generate_waiting_room_html(self): context = { - 'players': [player.user.username for player in self.waiting_players], + 'players': [player.user.username if player else 'BOT' for player in self.waiting_players], 'tournament_state': self.tournament_state, 'players_count': len(self.waiting_players), 'min_players_to_start': 2 # You can adjust this number as needed @@ -62,7 +59,8 @@ class TournamentMatchMaker: return render_to_string('pong/tournament_waiting_room.html', context) async def send_to_player(self, player, data): - await player.send(json.dumps(data)) + if player: + await 
player.send(json.dumps(data)) async def remove_player(self, player): if player in self.waiting_players: @@ -72,7 +70,9 @@ class TournamentMatchMaker: # Tournament start method async def start_tournament(self): if len(self.waiting_players) < 2: - return False + return False + if len(self.waiting_players) % 2 == 0: + await self.add_player(None) self.tournament_state = "in_progress" random.shuffle(self.waiting_players) self.current_round = 0 @@ -108,13 +108,15 @@ class TournamentMatchMaker: matches.append(match) else: # Create a BYE match where the second player is None - match = TournamentMatch(self.games, players[i], None, self) # BYE match + match = TournamentMatch(self.games, players[i], None, self) # BYE match matches.append(match) # Assign the new match instance to the players - await players[i].set_game(match) + if players[i]: + await players[i].set_game(match) if i + 1 < len(players): - await players[i + 1].set_game(match) + if players[i + 1]: + await players[i + 1].set_game(match) self.rounds.append(matches) self.matches.extend(matches) @@ -157,10 +159,10 @@ class TournamentMatchMaker: elif match.player1: # Handle BYE match await match_maker.notify_players(match.player1, match.player2, match.game_id, False) - match.game_state['player1_score'] = 3 + asyncio.create_task(match.start_game()) + '''match.game_state['player1_score'] = 3 match.game_state['player2_score'] = 0 - await match.end_game() - #asyncio.create_task(match.start_game()) + await match.end_game()''' def get_round_winners(self): winners = [] diff --git a/pong/settings.py b/pong/settings.py index 446ec08..b44ccdf 100644 --- a/pong/settings.py +++ b/pong/settings.py @@ -136,7 +136,7 @@ CHANNEL_LAYERS = { }, } -'''LOGGING = { +LOGGING = { 'version': 1, # The version of the logging configuration schema 'disable_existing_loggers': False, # Allows existing loggers to keep logging 'formatters': { # Defines how log messages will be formatted @@ -169,4 +169,4 @@ CHANNEL_LAYERS = { 'propagate': True, # If 
True, messages will be passed to the parent loggers as well }, }, -}''' +}