mirror of https://github.com/AudebertAdrien/ft_transcendence.git
synced 2025-12-15 21:56:50 +01:00

database saving works

This commit is contained in:
parent cf45f2daf4
commit 496ab98e2e
@@ -13,6 +13,6 @@ COPY certs/ certs/
 
 RUN python3 -m venv venv
 RUN venv/bin/pip3 install --upgrade pip
-RUN venv/bin/pip3 install --no-cache-dir -r requirements.txt
+RUN venv/bin/pip3 install --no-cache-dir -r requirements.txt -v
 
 EXPOSE 8080
docker-compose.yml.old  (Normal file, 240 lines added)
@@ -0,0 +1,240 @@
services:
  setup:
    image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION}
    container_name: setup
    user: "0"
    volumes:
      - certs:/usr/share/elasticsearch/config/certs
    command: >
      bash -c '
        if [ x${ELASTIC_PASSWORD} == x ]; then
          echo "Set the ELASTIC_PASSWORD environment variable in the .env file";
          exit 1;
        elif [ x${KIBANA_PASSWORD} == x ]; then
          echo "Set the KIBANA_PASSWORD environment variable in the .env file";
          exit 1;
        fi;
        if [ ! -f config/certs/ca.zip ]; then
          echo "Creating CA";
          bin/elasticsearch-certutil ca --silent --pem -out config/certs/ca.zip;
          unzip config/certs/ca.zip -d config/certs;
        fi;
        if [ ! -f config/certs/certs.zip ]; then
          echo "Creating certs";
          echo -ne \
          "instances:\n"\
          "  - name: es01\n"\
          "    dns:\n"\
          "      - es01\n"\
          "      - localhost\n"\
          "    ip:\n"\
          "      - 127.0.0.1\n"\
          "  - name: kibana\n"\
          "    dns:\n"\
          "      - kibana\n"\
          "      - localhost\n"\
          "    ip:\n"\
          "      - 127.0.0.1\n"\
          > config/certs/instances.yml;

          bin/elasticsearch-certutil cert --silent --pem -out config/certs/certs.zip --in config/certs/instances.yml --ca-cert config/certs/ca/ca.crt --ca-key config/certs/ca/ca.key;
          unzip config/certs/certs.zip -d config/certs;
        fi;

        echo "Setting file permissions"
        chown -R root:root config/certs;
        find . -type d -exec chmod 750 \{\} \;;
        find . -type f -exec chmod 640 \{\} \;;

        echo "Waiting for Elasticsearch availability";
        until curl -s --cacert config/certs/ca/ca.crt https://es01:9200 | grep -q "missing authentication credentials"; do sleep 30; done;
        echo "Setting kibana_system password";
        until curl -s -X POST --cacert config/certs/ca/ca.crt -u "elastic:${ELASTIC_PASSWORD}" -H "Content-Type: application/json" https://es01:9200/_security/user/kibana_system/_password -d "{\"password\":\"${KIBANA_PASSWORD}\"}" | grep -q "^{}"; do sleep 10; done;
        echo "All done!";
      '
    healthcheck:
      test: ["CMD-SHELL", "[ -f config/certs/es01/es01.crt ]"]
      interval: 1s
      timeout: 5s
      retries: 120

  backend:
    build:
      context: .
      dockerfile: Dockerfile
    image: backend
    container_name: backend
    restart: always
    command: /bin/sh -c "sleep 5 &&
      venv/bin/python manage.py makemigrations --noinput &&
      venv/bin/python manage.py migrate --noinput &&
      venv/bin/python manage.py collectstatic --noinput &&
      venv/bin/daphne -e ssl:8080:privateKey=./certs/ssl/backend-key.pem:certKey=./certs/ssl/backend-cert.pem pong.asgi:application"
    volumes:
      - pong:/transcendence/pong
      - pong_django_logs:/transcendence/logs
    ports:
      - 8080:8080
    networks:
      - app-network
    environment:
      DB_HOST: db
      DB_PORT: 5432
      DB_NAME: ${POSTGRES_DB}
      DB_USER: ${POSTGRES_USER}
      DB_PASSWORD: ${POSTGRES_PASSWORD}
      SECURE_SSL_REDIRECT: True
      SECURE_HSTS_SECONDS: 31536000
      SECURE_HSTS_INCLUDE_SUBDOMAINS: True
      SECURE_HSTS_PRELOAD: True
    depends_on:
      - db
    healthcheck:
      test: ["CMD-SHELL", "curl", "http://localhost:8080"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 10s

  db:
    image: postgres:latest
    container_name: postgres
    restart: always
    volumes:
      - pong_pg_data:/var/lib/postgresql/data
    ports:
      - "5432:5432"
    networks:
      - app-network
    environment:
      POSTGRES_DB: ${POSTGRES_DB}
      POSTGRES_USER: ${POSTGRES_USER}
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}"]
      interval: 10s
      timeout: 5s
      retries: 5

  es01:
    image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION}
    container_name: es01
    depends_on:
      setup:
        condition: service_healthy
    volumes:
      - certs:/usr/share/elasticsearch/config/certs:ro
      - pong_es_data_01:/usr/share/elasticsearch/data
    labels:
      co.elastic.logs/module: elasticsearch
    ports:
      - 9200:9200
    environment:
      - node.name=es01
      - cluster.name=${CLUSTER_NAME}
      - discovery.type=single-node
      - ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
      - bootstrap.memory_lock=true
      - xpack.security.enabled=true
      - xpack.security.http.ssl.enabled=true
      - xpack.security.http.ssl.key=certs/es01/es01.key
      - xpack.security.http.ssl.certificate=certs/es01/es01.crt
      - xpack.security.http.ssl.certificate_authorities=certs/ca/ca.crt
      - xpack.security.transport.ssl.enabled=true
      - xpack.security.transport.ssl.key=certs/es01/es01.key
      - xpack.security.transport.ssl.certificate=certs/es01/es01.crt
      - xpack.security.transport.ssl.certificate_authorities=certs/ca/ca.crt
      - xpack.security.transport.ssl.verification_mode=certificate
      - xpack.license.self_generated.type=${LICENSE}
    healthcheck:
      test:
        [
          "CMD-SHELL",
          "curl -s --cacert config/certs/ca/ca.crt https://localhost:9200 | grep -q 'missing authentication credentials'",
        ]
      interval: 10s
      timeout: 10s
      retries: 120

  kibana:
    image: docker.elastic.co/kibana/kibana:${STACK_VERSION}
    container_name: kibana
    labels:
      co.elastic.logs/module: kibana
    depends_on:
      es01:
        condition: service_healthy
    volumes:
      - certs:/usr/share/kibana/config/certs:ro
      - pong_kibana:/usr/share/kibana/data
    ports:
      - 5601:5601
    environment:
      - SERVERNAME=kibana
      - ELASTICSEARCH_HOSTS=https://es01:9200
      - ELASTICSEARCH_USERNAME=${KIBANA_USERNAME}
      - ELASTICSEARCH_PASSWORD=${KIBANA_PASSWORD}
      - ELASTICSEARCH_SSL_CERTIFICATEAUTHORITIES=config/certs/ca/ca.crt
      - XPACK_SECURITY_ENCRYPTIONKEY=${ENCRYPTION_KEY}
      - XPACK_ENCRYPTEDSAVEDOBJECTS_ENCRYPTIONKEY=${ENCRYPTION_KEY}
      - XPACK_REPORTING_ENCRYPTIONKEY=${ENCRYPTION_KEY}
    healthcheck:
      test:
        [
          "CMD-SHELL",
          "curl -s -I http://localhost:5601 | grep -q 'HTTP/1.1 302 Found'"
        ]
      interval: 10s
      timeout: 10s
      retries: 120

  logstash01:
    image: docker.elastic.co/logstash/logstash:${STACK_VERSION}
    container_name: logstash01
    labels:
      co.elastic.logs/module: logstash
    user: root
    depends_on:
      es01:
        condition: service_healthy
      kibana:
        condition: service_healthy
    volumes:
      - certs:/usr/share/logstash/certs
      - pong_logstash_data01:/usr/share/logstash/data
      - ./config/logstash.conf:/usr/share/logstash/pipeline/logstash.conf:ro
      - pong_django_logs:/usr/share/logstash/logs
    ports:
      - "5044:5044/udp"
    command: logstash -f /usr/share/logstash/pipeline/logstash.conf
    environment:
      - NODE_NAME="logstash"
      - ELASTIC_HOSTS=https://es01:9200
      - ELASTIC_USER=${ELASTIC_USERNAME}
      - ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
      - xpack.monitoring.enabled=false

volumes:
  pong:
    driver: local
    driver_opts:
      type: none
      device: ${PROJECT_PATH}
      o: bind
  pong_django_logs:
    driver: local
  pong_pg_data:
    driver: local
  pong_es_data_01:
    driver: local
  pong_kibana:
    driver: local
  pong_logstash_data01:
    driver: local
  certs:
    driver: local

networks:
  app-network:
    name: app-network
    driver: bridge
makefile  (2 lines changed)
@@ -3,7 +3,7 @@ COMPOSE=docker compose -f $(COMPOSE_FILE)
 CONTAINER=$(c)
 
 up: down
 	$(COMPOSE) build
 	$(COMPOSE) up -d $(CONTAINER) || true
 
 build:
@@ -10,7 +10,7 @@ from .models import Tournoi
 
 class Game:
     # Global variable to handle the using of the database
-    USING_DB = False
+    #USING_DB = False
 
     def __init__(self, game_id, player1, player2, localgame):
         self.game_id = game_id
@@ -46,6 +46,7 @@ class Game:
         }
         self.speed = 1
         self.game_loop_task = None
+        self.database = None
         self.ended = False
         self.p1_mov = 0
         self.p2_mov = 0
@@ -236,7 +237,8 @@ class Game:
             })
         if not self.botgame:
             if not self.localgame:
                 await remaining_player.send(message)
+
         # Notify both players that the game has ended
         end_message = json.dumps({
             'type': 'game_ended',
@@ -246,17 +248,37 @@ class Game:
         if not self.botgame:
             if not self.localgame:
                 await self.player2.send(end_message)
-        while (Game.USING_DB):
-            await asyncio.sleep(1)
-        Game.USING_DB = True
+        #while (Game.USING_DB):
+        #    await asyncio.sleep(1)
+        #Game.USING_DB = True
         if hasattr(self, 'tournament'):
             print(f"*** Game #{self.game_id} from tournament: {self.tournament.tournoi_reg.name} ENDED ***")
-            await sync_to_async(handle_game_data)(self.game_state['player1_name'], self.game_state['player2_name'],
-                                                  self.game_state['player1_score'], self.game_state['player2_score'],
-                                                  self.bt1, self.bt2, duration, True, self.tournament.tournoi_reg)
+
+            # Create the async task
+            self.database_task = asyncio.create_task(
+                sync_to_async(handle_game_data)(
+                    self.game_state['player1_name'], self.game_state['player2_name'],
+                    self.game_state['player1_score'], self.game_state['player2_score'],
+                    self.bt1, self.bt2, duration, True, self.tournament.tournoi_reg
+                )
+            )
+            # Optionally wait for the task to complete if necessary
+            #await self.database
+
+            '''await sync_to_async(handle_game_data)(self.game_state['player1_name'], self.game_state['player2_name'],
+                                                     self.game_state['player1_score'], self.game_state['player2_score'],
+                                                     self.bt1, self.bt2, duration, True, self.tournament.tournoi_reg)'''
             print(f"*** Game #{self.game_id} from tournament: {self.tournament.tournoi_reg.name} is REGISTERED ***")
         else:
-            await sync_to_async(handle_game_data)(self.game_state['player1_name'], self.game_state['player2_name'],
-                                                  self.game_state['player1_score'], self.game_state['player2_score'],
-                                                  self.bt1, self.bt2, duration, False, None)
-            Game.USING_DB = False
+            self.database_task = asyncio.create_task(
+                sync_to_async(handle_game_data)(
+                    self.game_state['player1_name'], self.game_state['player2_name'],
+                    self.game_state['player1_score'], self.game_state['player2_score'],
+                    self.bt1, self.bt2, duration, False, None
+                )
+            )
+            '''await sync_to_async(handle_game_data)(self.game_state['player1_name'], self.game_state['player2_name'],
+                                                     self.game_state['player1_score'], self.game_state['player2_score'],
+                                                     self.bt1, self.bt2, duration, False, None)'''
+            #Game.USING_DB = False
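The hunk above replaces the old busy-wait on the Game.USING_DB flag with a background task: the synchronous handle_game_data call is wrapped in sync_to_async and scheduled with asyncio.create_task, and the handle is kept on the instance (self.database_task) so the task is not garbage-collected before it finishes. A minimal sketch of that pattern, assuming asgiref is installed; GameSketch, save_result and the sample names and scores are illustrative stand-ins, not the project's code:

```python
import asyncio
from asgiref.sync import sync_to_async


def save_result(p1_name, p2_name, p1_score, p2_score):
    # Stand-in for handle_game_data: synchronous Django ORM writes would happen here.
    print(f"saved {p1_name} {p1_score} - {p2_score} {p2_name}")


class GameSketch:
    def __init__(self):
        self.database_task = None  # keep a reference so the task is not garbage-collected early

    async def end_game(self):
        # Schedule the blocking write in the background instead of awaiting it inline;
        # sync_to_async runs save_result off the event loop, in a separate thread.
        self.database_task = asyncio.create_task(
            sync_to_async(save_result)("alice", "bob", 5, 3)
        )


async def main():
    game = GameSketch()
    await game.end_game()
    await game.database_task  # the demo waits so the write finishes before the loop closes


asyncio.run(main())
```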
@@ -1,3 +1,5 @@
+# /pong/game/utils.py
+
 from .models import Player, Tournoi, Match
 from django.core.exceptions import ValidationError
 from django.shortcuts import get_object_or_404
@@ -29,14 +29,15 @@ ALLOWED_HOSTS = ['*']
 # Application definition
 
 INSTALLED_APPS = [
-    'django.contrib.admin',
+    #'django.contrib.admin',
     'django.contrib.auth',
     'django.contrib.contenttypes',
-    'django.contrib.sessions',
-    'django.contrib.messages',
+    #'django.contrib.sessions',
+    #'django.contrib.messages',
     'django.contrib.staticfiles',
     'channels',
     'pong.game',
+    #'django_db_conn_pool',
     'rest_framework'
 ]
 
@@ -46,7 +47,7 @@ MIDDLEWARE = [
     'django.middleware.common.CommonMiddleware',
     'django.middleware.csrf.CsrfViewMiddleware',
     'django.contrib.auth.middleware.AuthenticationMiddleware',
-    'django.contrib.messages.middleware.MessageMiddleware',
+    #'django.contrib.messages.middleware.MessageMiddleware',
     'django.middleware.clickjacking.XFrameOptionsMiddleware',
 ]
 
@@ -81,6 +82,7 @@ DATABASES = {
         'PASSWORD': os.getenv('DB_PASSWORD'),
         'HOST': os.getenv('DB_HOST'),
         'PORT': '5432',
+        'CONN_MAX_AGE': None,
     }
 }
 
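The new 'CONN_MAX_AGE': None entry tells Django to keep its PostgreSQL connections open indefinitely instead of opening one per request (0 would close them after each request). A hedged reconstruction of how the full DATABASES block presumably reads; ENGINE, NAME and USER do not appear in the diff and are assumptions based on the DB_* variables set in docker-compose.yml.old:

```python
import os

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',  # assumption: Postgres backend
        'NAME': os.getenv('DB_NAME'),               # assumption, matches DB_NAME in the compose file
        'USER': os.getenv('DB_USER'),               # assumption, matches DB_USER in the compose file
        'PASSWORD': os.getenv('DB_PASSWORD'),
        'HOST': os.getenv('DB_HOST'),
        'PORT': '5432',
        # None = persistent connections with no age limit; 0 closes after each request.
        'CONN_MAX_AGE': None,
    }
}
```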
@@ -136,7 +138,7 @@ CHANNEL_LAYERS = {
     },
 }
 
-'''LOGGING = {
+LOGGING = {
     'version': 1, # The version of the logging configuration schema
     'disable_existing_loggers': False, # Allows existing loggers to keep logging
     'formatters': { # Defines how log messages will be formatted
@@ -169,4 +171,4 @@ CHANNEL_LAYERS = {
             'propagate': True, # If True, messages will be passed to the parent loggers as well
         },
     },
-}'''
+}
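The last two hunks re-enable the previously commented-out LOGGING dict. Only 'version', 'disable_existing_loggers', 'formatters' and a 'propagate' flag are visible in the diff; the sketch below is a minimal configuration consistent with those keys and with the pong_django_logs volume that docker-compose.yml.old shares between the backend and logstash. The handler name, file path and format string are assumptions, not the project's actual values.

```python
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'verbose': {
            'format': '{asctime} {levelname} {name} {message}',
            'style': '{',
        },
    },
    'handlers': {
        'file': {
            'class': 'logging.FileHandler',
            # Assumed path inside the volume mounted at /transcendence/logs and tailed by logstash.
            'filename': '/transcendence/logs/django.log',
            'formatter': 'verbose',
        },
    },
    'loggers': {
        'django': {
            'handlers': ['file'],
            'level': 'INFO',
            'propagate': True,
        },
    },
}
```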