This commit is contained in: dev
parent b602a5c53f
commit 070c26450c
@@ -1,7 +1,12 @@
-# must be the same that SITE_VERSION define in php-fpm/.env
+# compose project name. A docker compose project has as its name
+# the name of the folder where it is executed (default).
+# This setting overrides the default
+COMPOSE_PROJECT_NAME=gmo-loki
+# must be the same that SITE_VERSION
+# define in php-fpm/.env
 DOCKER_IMAGE_VERSION="3.25"
 # docker image tag = DOCKER_IMAGE_VERSION
 DOCKER_IMAGE=gmouchet/wwwgmo-php-fpm
 NGINX_PORT=8085
 PMA_PORT_WEB=8084
-ELASTIC_VERSION=7.17.19
+ELASTIC_VERSION=8.18.2
README.md (390 changed lines)
@@ -216,3 +216,393 @@ https://www.elastic.co/guide/en/elasticsearch/reference/8.18/docker.html
### 1.0 (2024-03-01)

* Created from scratch

You said:

I get the following error when I run my docker-compose.yml file:

    [2025-06-19T16:58:29.674+00:00][ERROR][elasticsearch-service] Unable to retrieve version information from Elasticsearch nodes. security_exception
    Root causes:
    security_exception: missing authentication credentials for REST request [/_nodes?filter_path=nodes.*.version%2Cnodes.*.http.publish_address%2Cnodes.*.ip]

My docker-compose.yml file:

    elasticsearch:
      image: docker.elastic.co/elasticsearch/elasticsearch:8.18.2
      container_name: elasticsearch
      environment:
        - node.name=es01
        - cluster.name=es-docker-cluster
        - discovery.type=single-node
        - bootstrap.memory_lock=true
        - xpack.security.enabled=true
        - ELASTIC_PASSWORD=changeme
        - xpack.security.http.ssl.enabled=false
        - ES_JAVA_OPTS=-Xms1g -Xmx1g
      ulimits:
        memlock:
          soft: -1
          hard: -1
      ports:
        - "9200:9200"
        - "9300:9300"
      volumes:
        # - esdata:/usr/share/elasticsearch/data
        - wwwgmo-elasticsearch:/usr/share/elasticsearch/data

    kibana:
      image: docker.elastic.co/kibana/kibana:8.18.2
      container_name: kibana
      depends_on:
        - elasticsearch
      environment:
        - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
        - xpack.security.enabled=false
      ports:
        - "5601:5601"

ChatGPT said:

The error you are seeing is caused by security being enabled in Elasticsearch (xpack.security.enabled=true) while no authentication credentials are supplied when its REST API is called.

🔍 Error details:

    security_exception: missing authentication credentials for REST request [/_nodes?...]

This means that Kibana, or another service, is talking to Elasticsearch without credentials while Elasticsearch requires authentication.
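You can confirm this diagnosis from the command line before touching the configuration. A quick sketch, assuming Elasticsearch is reachable on localhost:9200 and using the ELASTIC_PASSWORD value from the compose file above:

```bash
# Anonymous request: rejected with the same security_exception seen in the Kibana log
curl -s "http://localhost:9200/_nodes?filter_path=nodes.*.version"

# Authenticated request with the built-in elastic user: returns the node versions
curl -s -u elastic:changeme "http://localhost:9200/_nodes?filter_path=nodes.*.version"
```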

✅ Solution 1: Align security between Kibana and Elasticsearch

Currently:

* Elasticsearch has xpack.security.enabled=true
* Kibana has xpack.security.enabled=false

That is not a consistent setup. Kibana must be configured with credentials whenever Elasticsearch has security enabled.

🛠 Change the kibana section as follows:

    kibana:
      image: docker.elastic.co/kibana/kibana:8.18.2
      container_name: kibana
      depends_on:
        - elasticsearch
      environment:
        - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
        - ELASTICSEARCH_USERNAME=elastic
        - ELASTICSEARCH_PASSWORD=changeme
      ports:
        - "5601:5601"

⚠️ Important: ELASTICSEARCH_USERNAME=elastic and ELASTICSEARCH_PASSWORD=changeme are used here because that is the password you set in the ELASTIC_PASSWORD environment variable on the Elasticsearch side.

✅ Solution 2: Disable security entirely (development only)

If you are running a local test or development environment, you can disable security on the Elasticsearch side:

    environment:
      - xpack.security.enabled=false

In that case, keep Kibana without security as well:

    - xpack.security.enabled=false

⚠️ Not recommended in production: this approach removes the access protection on your data.

🧪 Tip: check that everything works

Once the containers have been restarted, test access to Elasticsearch:

    curl -u elastic:changeme http://localhost:9200

And for Kibana: open http://localhost:5601 in a browser.
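If you prefer checking from the command line, the container logs and Kibana's status API give the same information. A sketch: the container name kibana comes from container_name above, and /api/status may ask for the same credentials once security is fully wired up:

```bash
# Watch Kibana connect to Elasticsearch (Ctrl+C to stop following)
docker logs -f kibana

# Kibana's own status endpoint
curl -s http://localhost:5601/api/status
```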

Would you like to:

* keep security enabled and wire the credentials into every service, or
* disable security for a simpler local setup?

You said:

Keep security enabled and wire the credentials into every service

ChatGPT said:

Perfect! If you want to keep security enabled, every client service (such as Kibana) has to authenticate against Elasticsearch.

Here is how to adjust your docker-compose.yml to include the credentials.

✅ Corrected docker-compose.yml

    version: "3.8"

    services:
      elasticsearch:
        image: docker.elastic.co/elasticsearch/elasticsearch:8.18.2
        container_name: elasticsearch
        environment:
          - node.name=es01
          - cluster.name=es-docker-cluster
          - discovery.type=single-node
          - bootstrap.memory_lock=true
          - xpack.security.enabled=true
          - xpack.security.http.ssl.enabled=false
          - ELASTIC_PASSWORD=changeme
          - ES_JAVA_OPTS=-Xms1g -Xmx1g
        ulimits:
          memlock:
            soft: -1
            hard: -1
        ports:
          - "9200:9200"
          - "9300:9300"
        volumes:
          - wwwgmo-elasticsearch:/usr/share/elasticsearch/data

      kibana:
        image: docker.elastic.co/kibana/kibana:8.18.2
        container_name: kibana
        depends_on:
          - elasticsearch
        environment:
          - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
          - ELASTICSEARCH_USERNAME=elastic
          - ELASTICSEARCH_PASSWORD=changeme
        ports:
          - "5601:5601"

    volumes:
      wwwgmo-elasticsearch:

🔐 Authenticating against the Elasticsearch API (test with curl)

Once your containers are up, check that password-protected access works:

    curl -u elastic:changeme http://localhost:9200

Expected response:

    {
      "name": "es01",
      "cluster_name": "es-docker-cluster",
      "cluster_uuid": "...",
      "version": {
        "number": "8.18.2",
        ...
      },
      "tagline": "You Know, for Search"
    }

✅ Accessing Kibana

Open http://localhost:5601 in your browser. Kibana should start without reporting any Elasticsearch connection error.

✅ Security recommendation (optional)

For a safer setup:

* Create a dedicated user with specific permissions for Kibana, via the API or Kibana's admin UI.
* Avoid using elastic in production, since it is a superuser account.

Would you like an example script that creates a user with a custom role?

You said:

    Error: [config validation of [elasticsearch].username]: value of "elastic" is forbidden. This is a superuser account that cannot write to system indices that Kibana needs to function. Use a service account token instead. Learn more: https://www.elastic.co/guide/en/elasticsearch/reference/8.0/service-accounts.html
        at ensureValidConfiguration (/usr/share/kibana/node_modules/@kbn/core-config-server-internal/src/ensure_valid_configuration.js:44:11)
        at Server.preboot (/usr/share/kibana/node_modules/@kbn/core-root-server-internal/src/server.js:181:7)
        at Root.preboot (/usr/share/kibana/node_modules/@kbn/core-root-server-internal/src/root/index.js:48:14)
        at bootstrap (/usr/share/kibana/node_modules/@kbn/core-root-server-internal/src/bootstrap.js:96:29)
        at Command.<anonymous> (/usr/share/kibana/src/cli/serve/serve.js:235:5)

ChatGPT said:

Thanks for the very clear error message: starting with Elasticsearch 8, Kibana categorically refuses to use the elastic user, because:

❌ elastic is a superuser account. Kibana is not allowed to use it to write to the system indices it needs in order to function.

✅ Recommended solution: use a service account token

🔐 Steps to create a service account token for Kibana:

1. Start Elasticsearch with security enabled: keep xpack.security.enabled=true and make sure the Elasticsearch container is running.

2. Create a service account token for Kibana. Open a terminal and run:

    docker exec -it elasticsearch \
      bin/elasticsearch-service-tokens create elastic/kibana kibana-token

This generates a token that looks like:

    AAEAAWVsYXN0aWMvY2liYW5hL2tpYmFuYS10b2tlbjoxR3VOSVZzUjZsb3pk...

3. Update your docker-compose.yml for Kibana. Remove ELASTICSEARCH_USERNAME and ELASTICSEARCH_PASSWORD and use ELASTICSEARCH_SERVICEACCOUNTTOKEN instead:

    kibana:
      image: docker.elastic.co/kibana/kibana:8.18.2
      container_name: kibana
      depends_on:
        - elasticsearch
      environment:
        - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
        - ELASTICSEARCH_SERVICEACCOUNTTOKEN=AAEAAWVsYXN0aWMvY2liYW5hL2tpYmFuYS10b2tlbjoxR3VOSVZzUjZsb3pk...
      ports:
        - "5601:5601"

(replace the token with the one generated in the previous step)
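Instead of pasting the token by hand, it can be captured straight into an env file that docker-compose reads. This is only a sketch: it assumes the token value is the last whitespace-separated field of the tool's output, and the docker/kibana/.env path is an assumption based on this project's layout:

```bash
# Generate the service account token and append it to the Kibana env file
TOKEN=$(docker exec elasticsearch \
  bin/elasticsearch-service-tokens create elastic/kibana kibana-token | awk '{print $NF}')
echo "ELASTICSEARCH_SERVICEACCOUNTTOKEN=${TOKEN}" >> docker/kibana/.env
```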

🔄 Then:

Restart the services:

    docker-compose down
    docker-compose up -d

Go to http://localhost:5601 and you should see Kibana connect correctly.

🔎 Want to go further?

If you would rather create a dedicated Kibana user instead of a service account token, I can also walk you through that step by step, either with elasticsearch-users or through the REST API.

Would you like that alternative?

You said:

yes

ChatGPT said:

Perfect! Here is how to create a dedicated Kibana user through the Elasticsearch REST API, with the right role so that Kibana can run without the elastic superuser.

🎯 Goal

Create a user, for example kibana_system_user, with the kibana_system role (the minimum Kibana needs in order to work).

✅ Detailed steps

1. 🔐 Authenticate with the elastic superuser

Run this command to check that you can talk to Elasticsearch:

    curl -u elastic:changeme http://localhost:9200

2. 👤 Create a new kibana_system_user user

    curl -X POST http://localhost:9200/_security/user/kibana_system_user \
      -u elastic:changeme \
      -H "Content-Type: application/json" \
      -d '{
        "password" : "kibanapassword",
        "roles" : [ "kibana_system" ],
        "full_name" : "Kibana System User",
        "email" : "kibana@example.com"
      }'

⚠️ The password here is kibanapassword. Change it to whatever you prefer.
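Before wiring Kibana to the new account, it is worth verifying that the user can authenticate and carries the expected role. A quick check, assuming the password chosen above:

```bash
# Should answer with the user's details, including "roles" : [ "kibana_system" ]
curl -u kibana_system_user:kibanapassword "http://localhost:9200/_security/_authenticate?pretty"
```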

3. 🔧 Update docker-compose.yml for Kibana

Update the kibana section with these environment variables:

    kibana:
      image: docker.elastic.co/kibana/kibana:8.18.2
      container_name: kibana
      depends_on:
        - elasticsearch
      environment:
        - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
        - ELASTICSEARCH_USERNAME=kibana_system_user
        - ELASTICSEARCH_PASSWORD=kibanapassword
      ports:
        - "5601:5601"

4. 🔄 Restart the services

    docker-compose down
    docker-compose up -d

5. ✅ Access Kibana

Go to http://localhost:5601. Kibana should start without permission errors, because it now uses a restricted user that is still allowed to access the system indices it needs.

✅ Summary

| Item | Value |
| --- | --- |
| Elasticsearch user | kibana_system_user |
| Assigned role | kibana_system |
| Password | kibanapassword (or your own) |
| Benefit | Good practice, more secure |

Would you also like to create a user for ingesting data into Elasticsearch from an application?

You said:

and all of that in a single docker-compose file

ChatGPT said:

Perfect, let's put everything into a single docker-compose.yml, with:

* Elasticsearch secured
* Automatic creation of the kibana_system_user user with the kibana_system role
* Kibana configured to use that user

⚠️ Important:
Elasticsearch does not natively support creating users from docker-compose.yml, so a custom initialization script is mounted into the container and executed at startup.

✅ Recommended project layout:

    project/
    ├── docker-compose.yml
    ├── elasticsearch/
    │   └── create_kibana_user.sh

🔧 elasticsearch/create_kibana_user.sh (init script)

    #!/bin/bash

    # Wait for Elasticsearch to come up
    echo "⏳ Waiting for Elasticsearch to start..."
    until curl -u elastic:changeme -s http://localhost:9200 >/dev/null; do
      sleep 2
    done

    echo "✅ Elasticsearch is ready. Creating the Kibana user..."

    # Create a user for Kibana
    curl -X POST http://localhost:9200/_security/user/kibana_system_user \
      -u elastic:changeme \
      -H "Content-Type: application/json" \
      -d '{
        "password" : "kibanapassword",
        "roles" : [ "kibana_system" ],
        "full_name" : "Kibana System User"
      }'

    echo "✅ Kibana user created successfully."

Make the script executable:

    chmod +x elasticsearch/create_kibana_user.sh

📦 Full docker-compose.yml

    version: "3.8"

    services:
      elasticsearch:
        image: docker.elastic.co/elasticsearch/elasticsearch:8.18.2
        container_name: elasticsearch
        environment:
          - node.name=es01
          - cluster.name=es-docker-cluster
          - discovery.type=single-node
          - bootstrap.memory_lock=true
          - xpack.security.enabled=true
          - xpack.security.http.ssl.enabled=false
          - ELASTIC_PASSWORD=changeme
          - ES_JAVA_OPTS=-Xms1g -Xmx1g
        ulimits:
          memlock:
            soft: -1
            hard: -1
        ports:
          - "9200:9200"
          - "9300:9300"
        volumes:
          - wwwgmo-elasticsearch:/usr/share/elasticsearch/data
          - ./elasticsearch/create_kibana_user.sh:/usr/local/bin/create_kibana_user.sh
        command: >
          bash -c "
            /bin/tini -g -- /usr/local/bin/docker-entrypoint.sh &
            pid=$!;
            sleep 20;
            bash /usr/local/bin/create_kibana_user.sh;
            wait $pid
          "

      kibana:
        image: docker.elastic.co/kibana/kibana:8.18.2
        container_name: kibana
        depends_on:
          - elasticsearch
        environment:
          - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
          - ELASTICSEARCH_USERNAME=kibana_system_user
          - ELASTICSEARCH_PASSWORD=kibanapassword
        ports:
          - "5601:5601"

    volumes:
      wwwgmo-elasticsearch:

✅ Start everything

In your terminal:

    docker-compose down -v
    docker-compose up --build

✅ Expected result

* Elasticsearch starts with security enabled
* A script automatically creates the kibana_system_user user
* Kibana uses it to connect
* Everything comes up and works without manual intervention
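A quick smoke test once docker-compose up has finished, reusing the passwords defined above (a sketch, not part of the generated files):

```bash
# Elasticsearch answers with security enabled
curl -s -u elastic:changeme "http://localhost:9200/_cluster/health?pretty"

# The init script's output shows up in the Elasticsearch container logs
docker logs elasticsearch 2>&1 | grep -i "kibana user"

# Kibana should log that it is ready rather than an authentication error
docker logs kibana 2>&1 | tail -n 20
```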

Would you also like help adding an application user (e.g. my_app_user) with read/write access?
docker-compose.yml
@@ -1,74 +1,64 @@
-# GMo Lab
-#version: '2.3'
 services:
+  # Linux nginx mysql php
-## Linux nginx mysql php
+  # server http nginx
-# wwwgmo-nginx:
+  nginx:
 # container_name: wwwgmo-nginx
 # hostname: wwwgmo-nginx
-# image: nginxinc/nginx-unprivileged:1.23-alpine
+    image: nginxinc/nginx-unprivileged:1.23-alpine
-# volumes:
+    volumes:
-# - './docker/nginx/default.conf:/etc/nginx/conf.d/default.conf'
+      - './docker/nginx/default.conf:/etc/nginx/conf.d/default.conf'
-# - './certs:/etc/nginx/certs/'
+      - './certs:/etc/nginx/certs/'
-# - './src:/var/www/html:rw,cached'
+      - './src:/var/www/html:rw,cached'
-# ports:
+    ports:
-# - '${NGINX_PORT}:8080' #local:docker
+      - '${NGINX_PORT}:8080' #local:docker
 # depends_on:
 # - wwwgmo-php-fpm
 ##
-# wwwgmo-php-fpm:
+  php-fpm:
 # container_name: wwwgmo-php-fpm
 # hostname: wwwgmo-php-fpm
 # #image: wwwgmo
-# image: ${DOCKER_IMAGE}:${DOCKER_IMAGE_VERSION}
+    image: ${DOCKER_IMAGE}:${DOCKER_IMAGE_VERSION}
-# env_file:
+    env_file:
-# - ./docker/php-fpm/.env
+      - ./docker/php-fpm/.env
-# volumes:
+    volumes:
-# - './src/:/var/www/html:rw,cached'
+      - './src/:/var/www/html:rw,cached'
-# build:
+    build:
-# context: .
+      context: .
-# dockerfile: ./docker/php-fpm/Dockerfile
+      dockerfile: ./docker/php-fpm/Dockerfile
-# ports:
+    ports:
-# - '9000:9000' #local:docker
+      - '9000:9000' #local:docker
-# depends_on:
+    depends_on:
-# - wwwgmo-mariadb
+      - mariadb
-#
+
-# wwwgmo-mariadb:
+  mariadb:
 # container_name: wwwgmo-mariadb
 # hostname: wwwgmo-mariadb
-# #image: mysql:8.0-debian
+    #image: mysql:8.0-debian
-# #image: mysql/mysql-server:8.0.27-aarch64
+    #image: mysql/mysql-server:8.0.27-aarch64
-# image: mariadb:10.11.7
+    image: mariadb:10.11.7
-# volumes:
+    volumes:
-# - 'wwwgmo-mariadb:/var/lib/mysql:z'
+      - 'wwwgmo-mariadb:/var/lib/mysql:z'
-# env_file:
+    env_file:
-# - ./docker/mariadb/.env # ports:
+      - ./docker/mariadb/.env # ports:
-#
+
-# wwwgmo-phpmyadmin:
+  phpmyadmin:
 # container_name: wwwgmo-pma
-# image: phpmyadmin
+    image: phpmyadmin
-# links:
+    links:
-# - wwwgmo-mariadb
+      - mariadb
-# env_file:
+    env_file:
-# - ./docker/mariadb/.env
+      - ./docker/mariadb/.env
-# restart: always
+    restart: always
-# ports:
+    ports:
-# - ${PMA_PORT_WEB}:80
+      - ${PMA_PORT_WEB}:80
 #
-## EK
+  # elasticsearch

   elasticsearch:
-    image: docker.elastic.co/elasticsearch/elasticsearch:8.18.2
+    image: docker.elastic.co/elasticsearch/elasticsearch:${ELASTIC_VERSION}
-    container_name: elasticsearch
+    #container_name: elasticsearch
-    environment:
+    env_file:
-      - node.name=es01
+      - ./docker/elasticsearch/.env
-      - cluster.name=es-docker-cluster
-      - discovery.type=single-node
-      - bootstrap.memory_lock=true
-      - xpack.security.enabled=true
-      - ELASTIC_PASSWORD=changeme
-      - xpack.security.http.ssl.enabled=false
-      - ES_JAVA_OPTS=-Xms1g -Xmx1g
     ulimits:
       memlock:
         soft: -1
@@ -77,58 +67,29 @@ services:
       - "9200:9200"
       - "9300:9300"
     volumes:
-      - esdata:/usr/share/elasticsearch/data
+      - wwwgmo-elasticsearch:/usr/share/elasticsearch/data
-# - 'wwwgmo-elasticsearch:/usr/share/elasticsearch/data'
+      - ./docker/elasticsearch/create_kibana_user.sh:/usr/local/bin/create_kibana_user.sh
+    command: >
+      bash -c "
+        /bin/tini -g -- /usr/local/bin/docker-entrypoint.sh &
+        pid=$!;
+        sleep 20;
+        bash /usr/local/bin/create_kibana_user.sh;
+        wait $pid
+      "
+  # kibana
   kibana:
-    image: docker.elastic.co/kibana/kibana:8.18.2
+    image: docker.elastic.co/kibana/kibana:${ELASTIC_VERSION}
-    container_name: kibana
+    #container_name: kibana
+    env_file:
+      - ./docker/kibana/.env
     depends_on:
       - elasticsearch
-    environment:
-      - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
-      - xpack.security.enabled=false
     ports:
       - "5601:5601"


-# wwwgmo-elasticsearch:
-# container_name: wwwgmo-elasticsearch
-# hostname: wwwgmo-elasticsearch
-# image: 'docker.elastic.co/elasticsearch/elasticsearch:${ELASTIC_VERSION}'
-# #image: 'docker.elastic.co/elasticsearch/elasticsearch:8.8.1'
-# volumes:
-# - 'wwwgmo-elasticsearch:/usr/share/elasticsearch/data'
-# restart: unless-stopped
-# env_file:
-# - ./docker/elasticsearch/.env
-# ulimits:
-# memlock:
-# soft: -1
-# hard: -1
-# nofile:
-# soft: 65536
-# hard: 65536
-# cap_add:
-# - IPC_LOCK
-# ports:
-# - '9200:9200'
-# - '9300:9300'
-#
-## kibana
-# wwwgmo_kibana:
-# container_name: wwwgmo-kibana
-# hostname: wwwgmo-kibana
-# image: docker.elastic.co/kibana/kibana:${ELASTIC_VERSION}
-# #image: docker.elastic.co/kibana/kibana:8.8.1
-# restart: unless-stopped
-# env_file:
-# - ./docker/kibana/.env
-# ports:
-# - 5601:5601
-# #depends_on:
-# # - wwwgmo-elasticsearch
-#
 volumes:
-# wwwgmo-mariadb:
+  wwwgmo-elasticsearch:
-# wwwgmo-elasticsearch:
+    name: elasticsearch
-  esdata:
+  wwwgmo-mariadb:
+    name: mariadb
docker-compose.yml.orig (new file, 138 lines)
@@ -0,0 +1,138 @@
# GMo Lab
#version: '2.3'
services:

## Linux nginx mysql php
# wwwgmo-nginx:
# container_name: wwwgmo-nginx
# hostname: wwwgmo-nginx
# image: nginxinc/nginx-unprivileged:1.23-alpine
# volumes:
# - './docker/nginx/default.conf:/etc/nginx/conf.d/default.conf'
# - './certs:/etc/nginx/certs/'
# - './src:/var/www/html:rw,cached'
# ports:
# - '${NGINX_PORT}:8080' #local:docker
# depends_on:
# - wwwgmo-php-fpm
##
# wwwgmo-php-fpm:
# container_name: wwwgmo-php-fpm
# hostname: wwwgmo-php-fpm
# #image: wwwgmo
# image: ${DOCKER_IMAGE}:${DOCKER_IMAGE_VERSION}
# env_file:
# - ./docker/php-fpm/.env
# volumes:
# - './src/:/var/www/html:rw,cached'
# build:
# context: .
# dockerfile: ./docker/php-fpm/Dockerfile
# ports:
# - '9000:9000' #local:docker
# depends_on:
# - wwwgmo-mariadb
#
# wwwgmo-mariadb:
# container_name: wwwgmo-mariadb
# hostname: wwwgmo-mariadb
# #image: mysql:8.0-debian
# #image: mysql/mysql-server:8.0.27-aarch64
# image: mariadb:10.11.7
# volumes:
# - 'wwwgmo-mariadb:/var/lib/mysql:z'
# env_file:
# - ./docker/mariadb/.env # ports:
#
# wwwgmo-phpmyadmin:
# container_name: wwwgmo-pma
# image: phpmyadmin
# links:
# - wwwgmo-mariadb
# env_file:
# - ./docker/mariadb/.env
# restart: always
# ports:
# - ${PMA_PORT_WEB}:80
#
## EK

  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:8.18.2
    container_name: elasticsearch
    environment:
      - node.name=es01
      - cluster.name=es-docker-cluster
      - discovery.type=single-node
      - bootstrap.memory_lock=true
      - xpack.security.enabled=true
      - ELASTIC_PASSWORD=changeme
      - xpack.security.http.ssl.enabled=false
      - ES_JAVA_OPTS=-Xms1g -Xmx1g
    ulimits:
      memlock:
        soft: -1
        hard: -1
    ports:
      - "9200:9200"
      - "9300:9300"
    volumes:
      # - esdata:/usr/share/elasticsearch/data
      - wwwgmo-elasticsearch:/usr/share/elasticsearch/data

  kibana:
    image: docker.elastic.co/kibana/kibana:8.18.2
    container_name: kibana
    depends_on:
      - elasticsearch
    environment:
      - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
      - ELASTICSEARCH_USERNAME=elastic
      - ELASTICSEARCH_PASSWORD=changeme
      #- xpack.security.enabled=true

    ports:
      - "5601:5601"


# wwwgmo-elasticsearch:
# container_name: wwwgmo-elasticsearch
# hostname: wwwgmo-elasticsearch
# image: 'docker.elastic.co/elasticsearch/elasticsearch:${ELASTIC_VERSION}'
# #image: 'docker.elastic.co/elasticsearch/elasticsearch:8.8.1'
# volumes:
# - 'wwwgmo-elasticsearch:/usr/share/elasticsearch/data'
# restart: unless-stopped
# env_file:
# - ./docker/elasticsearch/.env
# ulimits:
# memlock:
# soft: -1
# hard: -1
# nofile:
# soft: 65536
# hard: 65536
# cap_add:
# - IPC_LOCK
# ports:
# - '9200:9200'
# - '9300:9300'
#
## kibana
# wwwgmo_kibana:
# container_name: wwwgmo-kibana
# hostname: wwwgmo-kibana
# image: docker.elastic.co/kibana/kibana:${ELASTIC_VERSION}
# #image: docker.elastic.co/kibana/kibana:8.8.1
# restart: unless-stopped
# env_file:
# - ./docker/kibana/.env
# ports:
# - 5601:5601
# #depends_on:
# # - wwwgmo-elasticsearch
#
volumes:
# wwwgmo-mariadb:
  wwwgmo-elasticsearch:
# esdata:
@@ -15,14 +15,15 @@ while test $# -gt 0
 do
   case "$1" in
     install)
-      env UID=${_UID} GID=${_GID} docker compose up -d wwwgmo-php-fpm
+      env UID=${_UID} GID=${_GID} docker compose up -d php-fpm
       #docker exec --user root wwwgmo-php-fpm "/bin/sh -c 'cd /var/www/html/; composer require elasticsearch/elasticsearch'"
-      docker exec --user root wwwgmo-php-fpm /bin/sh -c "cd /var/www/html; composer require elasticsearch/elasticsearch:^7.17.2"
+      #docker exec --user root ${COMPOSE_PROJECT_NAME}-php-fpm-1 /bin/sh -c "cd /var/www/html; composer require elasticsearch/elasticsearch:^7.17.2"
-      env UID=${_UID} GID=${_GID} docker compose stop
+      docker exec --user root ${COMPOSE_PROJECT_NAME}-php-fpm-1 /bin/sh -c "cd /var/www/html; composer require elasticsearch/elasticsearch:^8.0"
+      env UID=${_UID} GID=${_GID} docker compose stop
       ;;
     start)
       env UID=${_UID} GID=${_GID} docker compose up -d;
-      docker exec --user root wwwgmo-php-fpm composer install --no-interaction
+      docker exec --user root ${COMPOSE_PROJECT_NAME}-php-fpm-1 composer install --no-interaction
       ;;
     start-nginx)
       env UID=${_UID} GID=${_GID} docker compose up -d wwwgmo-nginx
docker/elasticsearch/.env
@@ -1,6 +1,10 @@
-xpack.security.transport.ssl.enabled=true
+# config es
-xpack.security.enabled=true
+node.name=es01
+cluster.name=es-docker-cluster
 discovery.type=single-node
-cluster.name=gmo_es_cluster
+bootstrap.memory_lock=true
-ELASTIC_USERNAME=elastic
+xpack.security.enabled=true
-ELASTIC_PASSWORD=pa55w0rd
+xpack.security.http.ssl.enabled=false
+ES_JAVA_OPTS=-Xms1g -Xmx1g
+# password for the elastic user
+ELASTIC_PASSWORD=pa55w0rd
docker/elasticsearch/create_kibana_user.sh (new file, 21 lines)
@@ -0,0 +1,21 @@
#!/bin/bash

# Wait for elasticsearch to start
echo "--- waiting for elasticsearch to start..."
until curl -u elastic:${ELASTIC_PASSWORD} -s http://localhost:9200 >/dev/null; do
  sleep 2
done

echo "--- elasticsearch is ready. Creating the Kibana user..."

# create a user for Kibana
curl -X POST http://localhost:9200/_security/user/kibana_system_user \
  -u elastic:${ELASTIC_PASSWORD} \
  -H "Content-Type: application/json" \
  -d '{
    "password" : "kibanapassword",
    "roles" : [ "kibana_system" ],
    "full_name" : "Kibana System User"
  }'

echo "--- kibana user successfully created."
docker/kibana/.env
@@ -1,4 +1,6 @@
-ELASTICSEARCH_HOSTS=http://wwwgmo-elasticsearch:9200
+# es url
-ELASTICSEARCH_USERNAME=elastic
+ELASTICSEARCH_HOSTS=http://elasticsearch:9200
-ELASTICSEARCH_PASSWORD=pa55w0rd
+# username (technical) with which kibana connects to elasticsearch
-xpack.security.enabled=true
+ELASTICSEARCH_USERNAME=kibana_system_user
+# password for username
+ELASTICSEARCH_PASSWORD=kibanapassword
docker/nginx/default.conf
@@ -49,7 +49,7 @@

     location ~* \.php$ {
       #fastcgi_pass 127.0.0.1:9000;
-      fastcgi_pass wwwgmo-php-fpm:9000;
+      fastcgi_pass php-fpm:9000;
       #fastcgi_pass archivescacm-php-fpm:9000;
       include fastcgi_params;
       fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
src/composer.json
@@ -1,10 +1,5 @@
 {
-  "config": {
-    "allow-plugins": {
-      "php-http/discovery": true
-    }
-  },
   "require": {
-    "elasticsearch/elasticsearch": "^7.17.2"
+    "elasticsearch/elasticsearch": "^8.0"
   }
 }
src/composer.lock (generated, 1102 changed lines): diff suppressed because it is too large.
@@ -1,6 +1,6 @@
 <?php
 require_once "./vendor/autoload.php";
-use Elasticsearch\ClientBuilder;
+use Elastic\Elasticsearch\ClientBuilder;

 require_once 'config.php';
 function connect_es($es_host, $es_user, $es_password)