diff --git a/.env b/.env
index 4ecdb09..b4f3900 100644
--- a/.env
+++ b/.env
@@ -12,4 +12,18 @@ DB_HOST=db
 DB_PORT=5432
 
 PROJECT_PATH=${PWD}/pong
-POSTGRES_DATA_PATH=${PWD}/data/db
\ No newline at end of file
+POSTGRES_DATA_PATH=${PWD}/data/db
+
+# ElasticSearch settings
+STACK_VERSION=8.14.3
+CLUSTER_NAME=docker-cluster
+LICENSE=trial
+
+ELASTIC_USERNAME=adrien
+ELASTIC_PASSWORD=qwerty42
+
+# Kibana settings
+KIBANA_PASSWORD=qwerty42
+KIBANA_PORT=5601
+
+ENCRYPTION_KEY=c34d38b3a14956121ff2170e5030b471551370178f43e5626eec58b04a30fae2
diff --git a/.gitignore b/.gitignore
index 6ccca4f..bc4f360 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,4 +2,4 @@ venv/
 __pycache__/
 data/
 .env
-makefile
\ No newline at end of file
+makefile
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 351dbe4..8b2b3dd 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -1,24 +1,4 @@
 services:
-  db:
-    image: postgres:latest
-    container_name: postgres
-    restart: always
-    volumes:
-      - postgres_data:/var/lib/postgresql/data
-    ports:
-      - "5432:5432"
-    healthcheck:
-      test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}"]
-      interval: 10s
-      timeout: 5s
-      retries: 5
-    networks:
-      - app-network
-    environment:
-      POSTGRES_DB: ${POSTGRES_DB}
-      POSTGRES_USER: ${POSTGRES_USER}
-      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
-
   backend:
     build:
       context: .
@@ -27,16 +7,14 @@ services:
     container_name: backend
     restart: always
     command: /bin/sh -c "sleep 5 &&
-    venv/bin/python manage.py makemigrations --noinput &&
-    venv/bin/python manage.py migrate --noinput &&
-    venv/bin/python manage.py collectstatic --noinput &&
-    venv/bin/daphne -b 0.0.0.0 -p 80 pong.asgi:application"
+      venv/bin/python manage.py makemigrations --noinput &&
+      venv/bin/python manage.py migrate --noinput &&
+      venv/bin/python manage.py collectstatic --noinput &&
+      venv/bin/daphne -b 0.0.0.0 -p 8080 pong.asgi:application"
     volumes:
       - pong:/transcendence/pong
     ports:
-      - "80:80"
-    depends_on:
-      - db
+      - "8080:8080"
     networks:
       - app-network
     environment:
@@ -45,25 +23,105 @@ services:
       DB_NAME: ${POSTGRES_DB}
       DB_USER: ${POSTGRES_USER}
       DB_PASSWORD: ${POSTGRES_PASSWORD}
-    healthcheck:
-      test: ["CMD-SHELL", "curl -f http://localhost:80 || exit 1"]
-      interval: 10s
-      timeout: 5s
-      retries: 5
+    depends_on:
+      - db
+
+  db:
+    image: postgres:latest
+    container_name: postgres
+    restart: always
+    volumes:
+      - pong_pg_data:/var/lib/postgresql/data
+    ports:
+      - "5432:5432"
+    networks:
+      - app-network
+    environment:
+      POSTGRES_DB: ${POSTGRES_DB}
+      POSTGRES_USER: ${POSTGRES_USER}
+      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
+    #healthcheck:
+      #test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}"]
+      #interval: 10s
+      #timeout: 5s
+      #retries: 5
+
+  es01:
+    image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION}
+    container_name: es01
+    volumes:
+      - pong_logstash_data_01:/usr/share/elasticsearch/data
+    ports:
+      - "127.0.0.1:9200:9200"
+    networks:
+      - app-network
+    environment:
+      - node.name=es01
+      - cluster.name=${CLUSTER_NAME}
+      - discovery.type=single-node
+      - ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
+      - xpack.security.enabled=false
+      - xpack.license.self_generated.type=trial
+    depends_on:
+      - logstash01
+
+  logstash01:
+    image: docker.elastic.co/logstash/logstash:${STACK_VERSION}
+    container_name: logstash01
+    volumes:
+      - ./logstash.conf:/usr/share/logstash/pipeline/logstash.conf:ro
+      - /var/lib/docker/containers:/var/lib/docker/containers:ro
+      - /var/run/docker.sock:/var/run/docker.sock:ro
+    ports:
+      - "5044:5044"
+    networks:
+      - app-network
+    environment:
+      - ELASTIC_HOSTS=http://es01:9200
+      - ELASTIC_USER=${ELASTIC_USERNAME}
+      - ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
+      - xpack.monitoring.enabled=false
+
+  kibana:
+    image: docker.elastic.co/kibana/kibana:${STACK_VERSION}
+    container_name: kibana
+    volumes:
+      - pong_kibana:/usr/share/kibana/data
+    user: "1000:1000"
+    ports:
+      - 5601:5601
+    networks:
+      - app-network
+    environment:
+      - SERVERNAME=pong.kibana.org
+      - ELASTICSEARCH_HOSTS=http://es01:9200
+      - ELASTICSEARCH_USERNAME=${ELASTIC_USERNAME}
+      - ELASTICSEARCH_PASSWORD=${ELASTIC_PASSWORD}
+    depends_on:
+      - es01
+    #healthcheck:
+      #test: ["CMD-SHELL", "curl -s -I http://localhost:5601 | grep -q 'HTTP/1.1 302 Found'"]
+      #interval: 20s
+      #timeout: 10s
+      #retries: 120
 
 volumes:
-  postgres_data:
-    driver: local
-    driver_opts:
-      type: none
-      device: ${POSTGRES_DATA_PATH}
-      o: bind
   pong:
     driver: local
     driver_opts:
       type: none
       device: ${PROJECT_PATH}
       o: bind
+  pong_pg_data:
+    driver: local
+  pong_es_data_01:
+    driver: local
+  pong_kibana:
+    driver: local
+  pong_logstash_data_01:
+    driver: local
+  pong_filebeat_data_01:
+    driver: local
 
 networks:
   app-network:
diff --git a/docker-compose.yaml-simle b/docker-compose.yaml-simle
new file mode 100644
index 0000000..d11b2d0
--- /dev/null
+++ b/docker-compose.yaml-simle
@@ -0,0 +1,61 @@
+services:
+  backend:
+    build:
+      context: .
+      dockerfile: Dockerfile
+    image: backend
+    container_name: backend
+    restart: always
+    command: /bin/sh -c "sleep 5 &&
+      venv/bin/python manage.py makemigrations --noinput &&
+      venv/bin/python manage.py migrate --noinput &&
+      venv/bin/python manage.py collectstatic --noinput &&
+      venv/bin/daphne -b 0.0.0.0 -p 8080 pong.asgi:application"
+    volumes:
+      - pong:/transcendence/pong
+    ports:
+      - "8080:8080"
+    networks:
+      - app-network
+    environment:
+      DB_HOST: db
+      DB_PORT: 5432
+      DB_NAME: ${POSTGRES_DB}
+      DB_USER: ${POSTGRES_USER}
+      DB_PASSWORD: ${POSTGRES_PASSWORD}
+    depends_on:
+      - db
+
+  db:
+    image: postgres:latest
+    container_name: postgres
+    restart: always
+    volumes:
+      - pong_pg_data:/var/lib/postgresql/data
+    ports:
+      - "5432:5432"
+    networks:
+      - app-network
+    environment:
+      POSTGRES_DB: ${POSTGRES_DB}
+      POSTGRES_USER: ${POSTGRES_USER}
+      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
+    healthcheck:
+      test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}"]
+      interval: 10s
+      timeout: 5s
+      retries: 5
+
+volumes:
+  pong:
+    driver: local
+    driver_opts:
+      type: none
+      device: ${PROJECT_PATH}
+      o: bind
+  pong_pg_data:
+    driver: local
+
+networks:
+  app-network:
+    driver: bridge
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..944e595
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,212 @@
+services:
+  setup:
+    image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION}
+    volumes:
+      - certs:/usr/share/elasticsearch/config/certs
+    user: "0"
+    command: >
+      bash -c '
+        if [ x${ELASTIC_PASSWORD} == x ]; then
+          echo "Set the ELASTIC_PASSWORD environment variable in the .env file";
+          exit 1;
+        elif [ x${KIBANA_PASSWORD} == x ]; then
+          echo "Set the KIBANA_PASSWORD environment variable in the .env file";
+          exit 1;
+        fi;
+        if [ ! -f config/certs/ca.zip ]; then
+          echo "Creating CA";
+          bin/elasticsearch-certutil ca --silent --pem -out config/certs/ca.zip;
+          unzip config/certs/ca.zip -d config/certs;
+        fi;
+        if [ ! -f config/certs/certs.zip ]; then
+          echo "Creating certs";
+          echo -ne \
+          "instances:\n"\
+          "  - name: es01\n"\
+          "    dns:\n"\
+          "      - es01\n"\
+          "      - localhost\n"\
+          "    ip:\n"\
+          "      - 127.0.0.1\n"\
+          "  - name: kibana\n"\
+          "    dns:\n"\
+          "      - kibana\n"\
+          "      - localhost\n"\
+          "    ip:\n"\
+          "      - 127.0.0.1\n"\
+          > config/certs/instances.yml;
+          bin/elasticsearch-certutil cert --silent --pem -out config/certs/certs.zip --in config/certs/instances.yml --ca-cert config/certs/ca/ca.crt --ca-key config/certs/ca/ca.key;
+          unzip config/certs/certs.zip -d config/certs;
+        fi;
+        echo "Setting file permissions"
+        chown -R root:root config/certs;
+        find . -type d -exec chmod 750 \{\} \;;
+        find . -type f -exec chmod 640 \{\} \;;
+        echo "Waiting for Elasticsearch availability";
+        until curl -s --cacert config/certs/ca/ca.crt https://es01:9200 | grep -q "missing authentication credentials"; do sleep 30; done;
+        echo "Setting kibana_system password";
+        until curl -s -X POST --cacert config/certs/ca/ca.crt -u "elastic:${ELASTIC_PASSWORD}" -H "Content-Type: application/json" https://es01:9200/_security/user/kibana_system/_password -d "{\"password\":\"${KIBANA_PASSWORD}\"}" | grep -q "^{}"; do sleep 10; done;
+        echo "All done!";
+      '
+    healthcheck:
+      test: ["CMD-SHELL", "[ -f config/certs/es01/es01.crt ]"]
+      interval: 1s
+      timeout: 5s
+      retries: 120
+
+  backend:
+    build:
+      context: .
+      dockerfile: Dockerfile
+    image: backend
+    container_name: backend
+    restart: always
+    command: /bin/sh -c "sleep 5 &&
+      venv/bin/python manage.py makemigrations --noinput &&
+      venv/bin/python manage.py migrate --noinput &&
+      venv/bin/python manage.py collectstatic --noinput &&
+      venv/bin/daphne -b 0.0.0.0 -p 8080 pong.asgi:application"
+    volumes:
+      - pong:/transcendence/pong
+    ports:
+      - "8080:8080"
+    networks:
+      - app-network
+    environment:
+      DB_HOST: db
+      DB_PORT: 5432
+      DB_NAME: ${POSTGRES_DB}
+      DB_USER: ${POSTGRES_USER}
+      DB_PASSWORD: ${POSTGRES_PASSWORD}
+    depends_on:
+      - db
+
+  db:
+    image: postgres:latest
+    container_name: postgres
+    restart: always
+    volumes:
+      - pong_pg_data:/var/lib/postgresql/data
+    ports:
+      - "5432:5432"
+    networks:
+      - app-network
+    environment:
+      POSTGRES_DB: ${POSTGRES_DB}
+      POSTGRES_USER: ${POSTGRES_USER}
+      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
+    #healthcheck:
+      #test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}"]
+      #interval: 10s
+      #timeout: 5s
+      #retries: 5
+
+  es01:
+    image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION}
+    container_name: es01
+    volumes:
+      - certs:/usr/share/elasticsearch/config/certs
+      - pong_logstash_data_01:/usr/share/elasticsearch/data
+    ports:
+      - "127.0.0.1:9200:9200"
+    networks:
+      - app-network
+    environment:
+      - node.name=es01
+      - cluster.name=${CLUSTER_NAME}
+      - discovery.type=single-node
+      - ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
+      - bootstrap.memory_lock=true
+      - xpack.security.enabled=true
+      - xpack.security.http.ssl.enabled=true
+      - xpack.security.http.ssl.key=certs/es01/es01.key
+      - xpack.security.http.ssl.certificate=certs/es01/es01.crt
+      - xpack.security.http.ssl.certificate_authorities=certs/ca/ca.crt
+      - xpack.security.transport.ssl.enabled=true
+      - xpack.security.transport.ssl.key=certs/es01/es01.key
+      - xpack.security.transport.ssl.certificate=certs/es01/es01.crt
+      - xpack.security.transport.ssl.certificate_authorities=certs/ca/ca.crt
+      - xpack.security.transport.ssl.verification_mode=certificate
+      - xpack.license.self_generated.type=${LICENSE}
+    depends_on:
+      - logstash01
+
+  logstash01:
+    image: docker.elastic.co/logstash/logstash:${STACK_VERSION}
+    container_name: logstash01
+    volumes:
+      - ./logstash.conf:/usr/share/logstash/pipeline/logstash.conf:ro
+    ports:
+      - "5044:5044"
+    networks:
+      - app-network
+    environment:
+      - ELASTIC_HOSTS=http://es01:9200
+      - ELASTIC_USER=${ELASTIC_USERNAME}
+      - ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
+      - xpack.monitoring.enabled=false
+
+  kibana:
+    image: docker.elastic.co/kibana/kibana:${STACK_VERSION}
+    container_name: kibana
+    volumes:
+      - pong_kibana:/usr/share/kibana/data
+    user: "1000:1000"
+    ports:
+      - 5601:5601
+    networks:
+      - app-network
+    environment:
+      - SERVERNAME=pong.kibana.org
+      - ELASTICSEARCH_HOSTS=http://es01:9200
+      - ELASTICSEARCH_USERNAME=${ELASTIC_USERNAME}
+      - ELASTICSEARCH_PASSWORD=${ELASTIC_PASSWORD}
+    depends_on:
+      - es01
+    #healthcheck:
+      #test: ["CMD-SHELL", "curl -s -I http://localhost:5601 | grep -q 'HTTP/1.1 302 Found'"]
+      #interval: 20s
+      #timeout: 10s
+      #retries: 120
+
+  filebeat01:
+    depends_on:
+      - es01
+    image: docker.elastic.co/beats/filebeat:${STACK_VERSION}
+    volumes:
+      - pong_filebeat_data_01:/usr/share/filebeat/data
+      - ./filebeat.yml:/usr/share/filebeat/filebeat.yml:ro
+      - /var/lib/docker/containers:/var/lib/docker/containers:ro
+      - /var/run/docker.sock:/var/run/docker.sock:ro
+    environment:
+      - ELASTIC_USER=elastic
+      - ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
+      - ELASTIC_HOSTS=https://es01:9200
+      - KIBANA_HOSTS=http://kibana:5601
+      - LOGSTASH_HOSTS=http://logstash01:9600
+    networks:
+      - app-network
+
+volumes:
+  certs:
+    driver: local
+  pong:
+    driver: local
+    driver_opts:
+      type: none
+      device: ${PROJECT_PATH}
+      o: bind
+  pong_pg_data:
+    driver: local
+  pong_es_data_01:
+    driver: local
+  pong_kibana:
+    driver: local
+  pong_logstash_data_01:
+    driver: local
+  pong_filebeat_data_01:
+    driver: local
+
+networks:
+  app-network:
+    driver: bridge
diff --git a/env_template b/env_template
index a22175d..ec4d245 100644
--- a/env_template
+++ b/env_template
@@ -1,5 +1,5 @@
 # Django settings
-SECRET_KEY=
+SECRET_KEY="FollowTheWhiteRabbit"
 DEBUG=True
 DJANGO_ALLOWED_HOSTS=['*']
 
@@ -13,3 +13,23 @@ DB_PORT=5432
 
 PROJECT_PATH=${PWD}/pong
 POSTGRES_DATA_PATH=${PWD}/data/db
+ES_DATA_PATH=${PWD}/data/es
+KIBA_DATA_PATH=${PWD}/data/kiba
+LSTASH_DATA_PATH=${PWD}/data/lstash
+
+# ElasticSearch settings
+STACK_VERSION=8.14.3
+CLUSTER_NAME=docker-cluster
+LICENSE=basic
+
+ELASTIC_PASSWORD=
+ES_PORT=9200
+
+# Kibana settings
+KIBANA_PASSWORD=
+KIBANA_PORT=5601
+
+ES_MEM_LIMIT=1073741824
+KB_MEM_LIMIT=1073741824
+LS_MEM_LIMIT=1073741824
+
diff --git a/filebeat.yml b/filebeat.yml
new file mode 100644
index 0000000..3082ccb
--- /dev/null
+++ b/filebeat.yml
@@ -0,0 +1,10 @@
+filebeat.inputs:
+- type: docker
+  containers.ids:
+    - "*"
+
+processors:
+- add_docker_metadata: ~
+
+output.logstash:
+  hosts: ["http://logstash01:5044"]
diff --git a/logstash.conf b/logstash.conf
new file mode 100644
index 0000000..4872c61
--- /dev/null
+++ b/logstash.conf
@@ -0,0 +1,21 @@
+input {
+  file {
+    path => "/var/lib/docker/containers/*/*.log"
+    start_position => "beginning"
+    sincedb_path => "/usr/share/logstash/data/sincedb"
+    type => "docker"
+    codec => "json"
+  }
+}
+
+filter {
+}
+
+output {
+  elasticsearch {
+    hosts => ["http://es01:9200"]
+    index => "docker-logs-%{+YYYY.MM.dd}"
+    user=> "${ELASTIC_USER}"
+    password=> "${ELASTIC_PASSWORD}"
+  }
+}
diff --git a/makefile b/makefile
index ecb19b5..0a07cd8 100644
--- a/makefile
+++ b/makefile
@@ -2,12 +2,11 @@ COMPOSE_FILE=docker-compose.yaml
 COMPOSE=docker compose -f $(COMPOSE_FILE)
 CONTAINER=$(c)
 
-up: down
-	sudo mkdir -p data/db
+up:
 	$(COMPOSE) build
-	$(COMPOSE) up $(CONTAINER)
+	$(COMPOSE) up
 
-build:
+build:
 	$(COMPOSE) build $(CONTAINER)
 
 start:
@@ -21,9 +20,8 @@ down:
 
 destroy:
 	$(COMPOSE) down -v --rmi all
-	sudo rm -rf data
-	#sudo lsof -i :5432 | awk 'NR>1 {print $$2}' | xargs sudo kill -9 || true
-	#sudo lsof -i :80 | awk 'NR>1 {print $$2}' | xargs sudo kill -9 || true
+	sudo lsof -i :5432 | awk 'NR>1 {print $$2}' | xargs sudo kill -9 || true
+	sudo lsof -i :80 | awk 'NR>1 {print $$2}' | xargs sudo kill -9 || true
 
 logs:
 	$(COMPOSE) logs -f $(CONTAINER)
diff --git a/pong/game/urls.py b/pong/game/urls.py
index 8da5f37..e27f293 100644
--- a/pong/game/urls.py
+++ b/pong/game/urls.py
@@ -14,6 +14,7 @@ urlpatterns = [
     path('check_user_exists/', views.check_user_exists, name='check_user_exists'),
     path('register_user/', views.register_user, name='register_user'),
     path('authenticate_user/', views.authenticate_user, name='authenticate_user'),
+    path('web3/', views.read_data, name='read_data'),
     path('players/', player_list, name='player_list'),
     path('matches/', match_list, name='match_list'),
     path('tournois/', tournoi_list, name='tournoi_list'),
diff --git a/pong/game/views.py b/pong/game/views.py
index 67fe699..be12945 100644
--- a/pong/game/views.py
+++ b/pong/game/views.py
@@ -116,3 +116,73 @@ def player_list_json(request):
 
 
 ####################### THEOUCHE PART ############################
+
+
+
+####################### jcheca PART ############################
+
+from web3 import Web3
+
+provider = Web3.HTTPProvider("https://sepolia.infura.io/v3/60e51df7c97c4f4c8ab41605a4eb9907")
+web3 = Web3(provider)
+eth_gas_price = web3.eth.gas_price/1000000000
+print(eth_gas_price)
+
+contract_address = "0x078D04Eb6fb97Cd863361FC86000647DC876441B"
PongTournament.Tournament","name":"","type":"tuple"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"owner","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"tournamentCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"tournaments","outputs":[{"internalType":"uint256","name":"id","type":"uint256"},{"internalType":"string","name":"name","type":"string"},{"internalType":"uint256","name":"timecode","type":"uint256"},{"internalType":"uint256","name":"participantCount","type":"uint256"}],"stateMutability":"view","type":"function"}] + +contract = web3.eth.contract(address=contract_address, abi=contract_abi) + +def read_data(request): + # Créer une instance du contrat + + # Appeler une fonction du contrat pour obtenir tous les tournois + tournaments = contract.functions.getAllTournaments().call() + + # Afficher les résultats + json_data = [] + for tournament in tournaments: + tournament_data = [] + for item in tournament: + print(f"{item}") + tournament_data.append(item) + json_data.append(tournament_data) + + # Retourner le JSON comme réponse HTTP + # print(f"Tournament ID: {tournament[0]}") + # print(f"Name: {tournament[1]}") + # print(f"Timecode: {tournament[2]}") + # print(f"Participant Count: {tournament[3]}") + # print(f"Player Pseudonyms: {', '.join(tournament[4])}") + # print(f"Final Order: {', '.join(tournament[5])}") + print("-----------------------------") + return JsonResponse(json_data, safe=False) + + +def write_data(request): + # addTournament(string,uint256,uint256,string[],string[]) + + # # Configuration de la transaction pour la fonction store + # account = "0x66CeBE2A1F7dae0F6AdBAad2c15A56A9121abfEf" + # private_key = "beb16ee3434ec5abec8b799549846cc04443c967b8d3643b943e2e969e7d25be" + + # nonce = web3.eth.get_transaction_count(account) + # transaction = contract.functions.addTournament("test",1721830559,6,["aaudeber", "tlorne", "ocassany", "yestello", "jcheca", "toto"],["toto", "jcheca", "yestello", "tlorne", "ocassany", "aaudeber"]).build_transaction({ + # 'chainId': 11155111, # ID de la chaîne Sepolia + # 'gas': 2000000, + # 'gasPrice': web3.to_wei(eth_gas_price, 'gwei'), + # 'nonce': nonce + # }) + + # # Signature de la transaction + # signed_txn = web3.eth.account.sign_transaction(transaction, private_key) + + # # Envoi de la transaction + # tx_hash = web3.eth.send_raw_transaction(signed_txn.rawTransaction) + # print("Transaction hash:", web3.to_hex(tx_hash)) + + # # Attente de la confirmation de la transaction + # tx_receipt = web3.eth.wait_for_transaction_receipt(tx_hash) + # print("Transaction receipt:", tx_receipt) + print("-----------------------------") + diff --git a/pong/static/flags/de.svg b/pong/static/flags/de.svg new file mode 100755 index 0000000..61e64d0 --- /dev/null +++ b/pong/static/flags/de.svg @@ -0,0 +1,7 @@ + diff --git a/pong/static/flags/es.svg b/pong/static/flags/es.svg new file mode 100755 index 0000000..ce2e1a8 --- /dev/null +++ b/pong/static/flags/es.svg @@ -0,0 +1,712 @@ + diff --git a/pong/static/flags/fr.svg b/pong/static/flags/fr.svg new file mode 100755 index 0000000..240e789 --- /dev/null +++ b/pong/static/flags/fr.svg @@ -0,0 +1,7 @@ + diff --git a/pong/static/flags/it.svg b/pong/static/flags/it.svg new file mode 100755 index 0000000..038afd9 --- /dev/null +++ b/pong/static/flags/it.svg @@ -0,0 +1,7 @@ + 
diff --git a/pong/static/flags/us.svg b/pong/static/flags/us.svg
new file mode 100755
index 0000000..8a8fb5c
--- /dev/null
+++ b/pong/static/flags/us.svg
@@ -0,0 +1,57 @@
+
diff --git a/pong/static/index.html b/pong/static/index.html
index 9aae525..84de3dc 100644
--- a/pong/static/index.html
+++ b/pong/static/index.html
@@ -1,6 +1,6 @@
 {% load static %}
-
+
@@ -10,9 +10,117 @@
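
The patch above maps a new web3/ route to views.read_data and publishes the backend on port 8080. A minimal sketch of how that route could be exercised once the stack is up follows; the base URL and the assumption that the game URLs are mounted at the project root are not shown in this diff and are purely hypothetical.

# Hypothetical smoke test for the new /web3/ endpoint; host, port and URL
# prefix are assumptions, not part of the patch. Standard library only.
import json
import urllib.request


def fetch_tournaments(base_url="http://localhost:8080"):
    """GET the read_data view and decode the JSON list it returns."""
    with urllib.request.urlopen(f"{base_url}/web3/") as response:
        return json.loads(response.read().decode("utf-8"))


if __name__ == "__main__":
    for tournament in fetch_tournaments():
        # Each entry mirrors the on-chain tuple:
        # [id, name, timecode, participantCount, playerPseudonyms, finalOrder]
        print(tournament)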