wordpress-nginx
Having changes permanent: volume changes are lost on docker-compose restart
Hello, for various reasons I would prefer not to build a new image based on yours just so I can keep my own version of vhost.conf in it. This is my vhost.conf:
server {
    server_name hellocasa.shared;

    access_log /var/www/html/access.log;
    error_log /var/www/html/error.log;

    root /var/www/html;
    index index.php;

    location / {
        # if ( $uri !~ "^(.*)/(wp-admin)") {
        #     return 404;
        # }
        try_files $uri $uri/ @handler /index.php?$args;
    }

    location @handler {
        fastcgi_pass 127.0.0.1:9000;
        fastcgi_split_path_info ^(.+\.php)(/.+)$;
        include fastcgi_params;
        fastcgi_index index.php;
        fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
        # fastcgi_intercept_errors on;
    }

    location @express {
        proxy_pass http://172.17.0.3:8080;
        proxy_next_upstream error timeout invalid_header http_500 http_502 http_503 http_504;
        proxy_redirect off;
        proxy_buffering off;
        proxy_set_header Host $host;
    }

    location ~ "^(.*)/(experts)" {
        root /var/www/html/;
        try_files $uri @express;
    }

    location ~ \.php$ {
        try_files $uri @handler;
        fastcgi_pass 127.0.0.1:9000;
        fastcgi_split_path_info ^(.+\.php)(/.+)$;
        include fastcgi_params;
        fastcgi_index index.php;
        fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
        # fastcgi_intercept_errors on;
    }
}
This is my docker-compose.yml:
version: "3.3"
services:
db:
image: wodby/mariadb
volumes:
- db-data:/var/lib/mysql
environment:
- MYSQL_USER=user
- MYSQL_PASSWORD=password
- MYSQL_DATABASE=database
- MYSQL_ROOT_PASSWORD=password
ports:
- "3306:3306"
network_mode: bridge
web:
privileged: on
image: wodby/wordpress-nginx
environment:
- NGINX_BACKEND_HOST=localhost
volumes:
- ./hellocasawebsite:/var/www/html
- nginx_config:/etc/nginx/conf.d
ports:
- "80:80"
depends_on:
- db
network_mode: bridge
node:
image: "node:9"
user: "root"
working_dir: /home/node/app
volumes:
- ./api:/home/node/app
- node_modules:/home/node/app/node_modules
# ports:
# - "3030:9000"
ports:
- "8080"
depends_on:
- web
command: "/bin/bash -c 'chown -R node /home/node/app; chmod 775 -R /home/node/app; su node -; npm install; npm run watch'"
network_mode: bridge
volumes:
db-data:
node_modules:
nginx_config:
And this is my Node script that populates the database and updates the vhost.conf:
#!/usr/bin/env node
const fs = require('fs');
const path = require('path');
const util = require('util');
const exec = util.promisify(require('child_process').exec);
const {promisify} = require('util');
const readFileAsync = promisify(fs.readFile);
const filePath = `${path.join(__dirname, '.')}/vhost.conf`;

const __NGINX_REMOTE_CONF_PATH = "/etc/nginx/conf.d/vhost.conf";
const __DOCKER_CONTAINER_NAME = "hellocasa_web_1";
const __DOCKER_SERVICE_NAME = "web";
async function configExists() {
  try {
    const { stdout } = await exec(`docker exec -i ${__DOCKER_CONTAINER_NAME} ls ${__NGINX_REMOTE_CONF_PATH}`);
    console.log('stdout', stdout);
    return stdout;
  }
  catch (error) {
    return '';
  }
}

async function backupConfigExists() {
  try {
    const { stdout } = await exec(`docker exec -i ${__DOCKER_CONTAINER_NAME} ls ${__NGINX_REMOTE_CONF_PATH}.orig`);
    console.log('stdout', stdout);
    return stdout;
  }
  catch (error) {
    return '';
  }
}

async function backupOldConfig() {
  try {
    const { stdout } = await exec(`docker exec -i ${__DOCKER_CONTAINER_NAME} cp ${__NGINX_REMOTE_CONF_PATH} ${__NGINX_REMOTE_CONF_PATH}.orig`);
    console.log('stdout', stdout);
    return stdout;
  }
  catch (error) {
    console.log(error.stderr);
  }
}

async function copyNewConfig() {
  try {
    const MY_NGINX_CONFIG = await readFileAsync(filePath, {encoding: 'utf8'});
    const something = `$(cat << END_HEREDOC
echo '${MY_NGINX_CONFIG}' > ${__NGINX_REMOTE_CONF_PATH}
)
`;
    // console.log(`${path.join(__dirname, '.')}/vhost.conf`);
    const {stdout} = await exec(`docker exec -i ${__DOCKER_CONTAINER_NAME} bash -c "${something}"`);
    // const { stdout } = await exec(`docker cp ${path.join(__dirname, '/')}vhost.conf ${__DOCKER_CONTAINER_NAME}:/etc/nginx/conf.d/vhost.conf`);
    // console.log('stdout', stdout);
    // return stdout;
  }
  catch (error) {
    console.log(error);
  }
}
// async function commitChanges() {
//   try {
//     const { stdout } = await exec(`docker commit ${__DOCKER_CONTAINER_NAME}`);
//     console.log('stdout', stdout);
//     return stdout;
//   }
//   catch (error) {
//     console.log(error.stderr);
//   }
// }

async function restartContainer() {
  try {
    const {stdout} = await exec(`docker-compose restart ${__DOCKER_SERVICE_NAME}`);
    console.log('stdout', stdout);
    return stdout;
  } catch (error) {
    console.log(error.stdout);
  }
}

async function tearUpContainers() {
  try {
    const {stdout} = await exec(`docker-compose up -d`);
    console.info('STARTING CONTAINERS!');
    console.log(stdout);
    console.info('CONTAINERS STARTED');
  } catch (error) {
    console.log(error.stderr);
  }
}
async function migrateDb() {
  try {
    const DATABASE_NAME = "database";
    const TABLE_CONTAINS = "wp_";
    const USERNAME = "username";
    const PASSWORD = "password";
    const HOST = "localhost";
    const SQL_SCRIPT = await readFileAsync(`${path.join(__dirname, '.')}/${DATABASE_NAME}.sql`, {encoding: 'utf8'});
    const SQL = `$(
cat << END_HEREDOC
mysql -u ${USERNAME} -p${PASSWORD} -h ${HOST} -e "USE ${DATABASE_NAME}; SHOW TABLES" 2> /dev/null | grep -i ${TABLE_CONTAINS} | head -2 | tail -1
)`;
    const SQL2 = `
$(
cat << END_HEREDOC
mysql -u ${USERNAME} -p${PASSWORD} -h ${HOST} ${DATABASE_NAME} < /home/mysql/${DATABASE_NAME}.sql
)
`;
    const {stdout: TABLE} = await exec(`docker exec -i hellocasa_db_1 bash -c "${SQL}"`);
    if (TABLE && TABLE != '' && TABLE != ' ') {
      console.log(`${TABLE} exists. Database already populated`);
    } else {
      console.log('Populating the database');
      const { stdout: copy_db_output } = await exec(`docker cp ${path.join(__dirname, '.')}/${DATABASE_NAME}.sql hellocasa_db_1:/home/mysql/${DATABASE_NAME}.sql`);
      const { stdout: populate_output } = await exec(`docker exec -i hellocasa_db_1 bash -c "${SQL2}"`);
      console.log(copy_db_output + populate_output);
      console.log('finished');
    }
  } catch (error) {
    console.error(error);
  }
}

function timeout(ms) {
  return new Promise(resolve => setTimeout(resolve, ms));
}
async function main() {
  try {
    await tearUpContainers();
    await timeout(10000);
    await migrateDb();
    const config = (await configExists()).trim();
    const backup = (await backupConfigExists()).trim();
    console.log('LOGGGG', config);
    console.log('__NGINX_REMOTE_CONF_PATH', __NGINX_REMOTE_CONF_PATH);
    console.log('config', config);
    if (__NGINX_REMOTE_CONF_PATH == config) {
      if (`${__NGINX_REMOTE_CONF_PATH}.orig` != backup) {
        await backupOldConfig();
        console.info('The original vhost config was backed up');
      }
      console.info('Copying local vhost conf into the container');
      await copyNewConfig();
      console.info('File copy successful');
      // commitChanges();
      // console.info('Changes to the container have been committed');
      console.info('Restarting container');
      await restartContainer();
      console.info('Done');
    } else {
      console.error('The path is not valid anymore or some error was thrown');
    }
  } catch (error) {
    console.log(error);
  }
}

main();
When I do docker-compose restart web, the changes to vhost.conf are lost. How can I fix this?
As I said, I need this for various corporate reasons: I would have to request a company account to upload the images, and the purpose of this setup is to get everyone on board with this project in the shortest possible time, without having to ask for 2000 permissions.
Thank you.
You can override the default virtual host config by pointing NGINX_CONF_INCLUDE at your own config, kept under version control and mounted into the nginx container.
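A rough sketch of how that could be wired into the compose file above, shown for the web service only; the mount path and the exact form of value NGINX_CONF_INCLUDE expects are assumptions here, so check the image documentation for the precise semantics:

web:
  image: wodby/wordpress-nginx
  environment:
    - NGINX_BACKEND_HOST=localhost
    # Assumption: NGINX_CONF_INCLUDE points the entrypoint at the mounted
    # config so it is used instead of the generated default vhost.
    - NGINX_CONF_INCLUDE=/etc/nginx/custom/vhost.conf
  volumes:
    - ./hellocasawebsite:/var/www/html
    # vhost.conf lives in the project repo and is bind-mounted read-only,
    # so edits on the host are always what the container sees.
    - ./vhost.conf:/etc/nginx/custom/vhost.conf:ro
  ports:
    - "80:80"
  depends_on:
    - db
  network_mode: bridge

Because the file comes from a bind mount rather than being written into the container's filesystem, docker-compose restart (and even recreating the container) will not lose it, and the config stays under version control alongside the rest of the project.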