diff --git a/aitrainer_backoffice/aitrainer_backoffice/models/customer.py b/aitrainer_backoffice/aitrainer_backoffice/models/customer.py
index 44887a1..d79c1a8 100644
--- a/aitrainer_backoffice/aitrainer_backoffice/models/customer.py
+++ b/aitrainer_backoffice/aitrainer_backoffice/models/customer.py
@@ -16,6 +16,7 @@ class Customer(models.Model):
synced_date = models.DateTimeField(blank=True,null=True)
firebase_reg_token = models.CharField(max_length=255, blank=True, null=True)
lang = models.CharField(max_length=5, blank=True, null=True)
+ trial_date = models.DateTimeField(blank=True,null=True)
def has_add_permission(self, request):
return False
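Note: syncTrial() in controlling/automation/mautic.py also reads and updates a trial_date_sync column that is not added to this model, and the new field itself still needs to reach the database. If Customer is a regular managed model, the accompanying migration would look roughly like the sketch below (the app label and dependency are assumptions); if the model only maps onto the external "live" database that the raw queries target, the columns would instead be added there directly.

    # hypothetical migration sketch, assuming Customer is a managed Django model
    from django.db import migrations, models

    class Migration(migrations.Migration):
        # dependency is an assumption; point it at the app's latest migration
        dependencies = [("aitrainer_backoffice", "0001_initial")]
        operations = [
            migrations.AddField(
                model_name="customer",
                name="trial_date",
                field=models.DateTimeField(blank=True, null=True),
            ),
        ]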
diff --git a/aitrainer_backoffice/aitrainer_backoffice/settings/deploy.py b/aitrainer_backoffice/aitrainer_backoffice/settings/deploy.py
index 238b4c5..a8c86cc 100644
--- a/aitrainer_backoffice/aitrainer_backoffice/settings/deploy.py
+++ b/aitrainer_backoffice/aitrainer_backoffice/settings/deploy.py
@@ -1,7 +1,7 @@
import os
from firebase_admin import initialize_app
-BACKOFFICE_VERSION = "1.33"
+BACKOFFICE_VERSION = "1.34"
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
diff --git a/aitrainer_backoffice/aitrainer_backoffice/settings/dev.py b/aitrainer_backoffice/aitrainer_backoffice/settings/dev.py
index e20becb..d022970 100644
--- a/aitrainer_backoffice/aitrainer_backoffice/settings/dev.py
+++ b/aitrainer_backoffice/aitrainer_backoffice/settings/dev.py
@@ -1,7 +1,7 @@
import os
from firebase_admin import initialize_app
-BACKOFFICE_VERSION = "1.33"
+BACKOFFICE_VERSION = "1.34"
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
diff --git a/aitrainer_backoffice/aitrainer_backoffice/settings/prod.py b/aitrainer_backoffice/aitrainer_backoffice/settings/prod.py
index 14aa649..7b25dfe 100644
--- a/aitrainer_backoffice/aitrainer_backoffice/settings/prod.py
+++ b/aitrainer_backoffice/aitrainer_backoffice/settings/prod.py
@@ -1,7 +1,7 @@
import os
from firebase_admin import initialize_app
-BACKOFFICE_VERSION = "1.33"
+BACKOFFICE_VERSION = "1.34"
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
diff --git a/aitrainer_backoffice/aitrainer_backoffice/templates/controlling/mautic.html b/aitrainer_backoffice/aitrainer_backoffice/templates/controlling/mautic.html
index b84e44a..58dfb92 100644
--- a/aitrainer_backoffice/aitrainer_backoffice/templates/controlling/mautic.html
+++ b/aitrainer_backoffice/aitrainer_backoffice/templates/controlling/mautic.html
@@ -2,9 +2,15 @@
{% block object-tools %}
-
+
+
+
diff --git a/aitrainer_backoffice/controlling/admin/customer.py b/aitrainer_backoffice/controlling/admin/customer.py
index 2c08773..736a5e4 100644
--- a/aitrainer_backoffice/controlling/admin/customer.py
+++ b/aitrainer_backoffice/controlling/admin/customer.py
@@ -77,7 +77,8 @@ class CustomerAdmin(admin.ModelAdmin):
def get_urls(self):
urls = super().get_urls()
my_urls = [
- path('mautic/', self.run_mautic),
+ path('mautic_lang/', self.run_mautic),
+ path('mautic_trial/', self.run_mautic_trial),
path('notif/', self.run_notif),
]
return my_urls + urls
@@ -89,7 +90,12 @@ class CustomerAdmin(admin.ModelAdmin):
def run_mautic(self, request):
mautic = Mautic()
mautic.syncLang()
- return HttpResponseRedirect("../")
+ return HttpResponseRedirect("../")
+
+ def run_mautic_trial(self, request):
+ mautic = Mautic()
+ mautic.syncTrial()
+ return HttpResponseRedirect("../")
admin.site.register(Customer, CustomerAdmin)
diff --git a/aitrainer_backoffice/controlling/automation/mautic.py b/aitrainer_backoffice/controlling/automation/mautic.py
index 23f653d..3545d36 100644
--- a/aitrainer_backoffice/controlling/automation/mautic.py
+++ b/aitrainer_backoffice/controlling/automation/mautic.py
@@ -52,6 +52,40 @@ class Mautic:
print( "Syncronised customer count: " + str(index))
return True
+
+ def syncTrial(self):
+ qs = Customer.objects.raw('SELECT * from customer WHERE trial_date is not null and trial_date_sync is null')
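+ # NOTE: this raw query expects a trial_date_sync column on the customer table in the live
+ # database; only trial_date is added to the Django model in this change set.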
+ logger = logging.getLogger(__name__)
+ logger.info("Syncronising trial date...")
+
+ headers = {
+ 'content-type': "application/x-www-form-urlencoded",
+ 'cache-control': "no-cache"
+ }
+ index = 0
+ for customer in qs:
+ data = "mauticform[email]=" + customer.email + \
+ "&mauticform[database_id]=" + str(customer.customer_id) + \
+ "&mauticform[trialdate]=" + str(customer.trial_date) + \
+ "&mauticform[formId]=3" + \
+ "&mauticform[formName]=appdatachange"
+
+ print(data)
+
+ form_url = 'https://mautic.aitrainer.app/form/submit?formId=3'
+ response = requests.post(form_url, data=data.encode('utf-8'), headers=headers)
+ print(customer.email + " " + str(response.status_code))
+
+ if response.status_code == 200:
+ with connections["live"].cursor() as cursor:
+ cursor.execute("UPDATE customer SET trial_date_sync = NOW() WHERE customer_id="
+ + str(customer.customer_id))
+ print(f'customer {customer.customer_id} is updated')
+
+ index = index + 1
+
+ print( "Syncronised customer count: " + str(index))
+ return True
def sync(self):
diff --git a/aitrainer_backoffice/controlling/cron/cron.py b/aitrainer_backoffice/controlling/cron/cron.py
index c10274f..77020bd 100644
--- a/aitrainer_backoffice/controlling/cron/cron.py
+++ b/aitrainer_backoffice/controlling/cron/cron.py
@@ -1,4 +1,5 @@
from ..automation.notification import NotificationExec
+from ..automation.mautic import Mautic
from django_cron import CronJobBase, Schedule
import datetime
@@ -25,3 +26,16 @@ class NotificationJob(CronJobBase):
print(datetime.datetime.now(), " *** END notification ")
+class TrialJob(CronJobBase):
+
+ RUN_EVERY_MINS = 60
+ schedule = Schedule(run_every_mins=RUN_EVERY_MINS)
+ code = 'aitrainer_backoffice.controlling.trial_sync' # a unique code
+
+ def do(self):
+ print(datetime.datetime.now(), " *** START trial sync ")
+ mautic = Mautic()
+ mautic.syncTrial()
+ print(datetime.datetime.now(), " *** END trial sync ")
+
+
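django_cron only runs job classes that are listed in the CRON_CLASSES setting, and that registration is not part of this diff. A minimal sketch, assuming the dotted path follows the project layout above:

    # settings sketch; the exact module path is an assumption
    CRON_CLASSES = [
        "controlling.cron.cron.NotificationJob",
        "controlling.cron.cron.TrialJob",
    ]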
diff --git a/backup.sh b/backup.sh
new file mode 100644
index 0000000..cfde8c3
--- /dev/null
+++ b/backup.sh
@@ -0,0 +1,342 @@
+#!/usr/bin/env bash
+
+DEBIAN_DOCKER_IMAGE="debian:buster-slim"
+
+if [[ ! -z ${MAILCOW_BACKUP_LOCATION} ]]; then
+ BACKUP_LOCATION="${MAILCOW_BACKUP_LOCATION}"
+fi
+
+if [[ ! ${1} =~ (backup|restore) ]]; then
+ echo "First parameter needs to be 'backup' or 'restore'"
+ exit 1
+fi
+
+if [[ ${1} == "backup" && ! ${2} =~ (crypt|vmail|redis|rspamd|postfix|mysql|all|--delete-days) ]]; then
+ echo "Second parameter needs to be 'vmail', 'crypt', 'redis', 'rspamd', 'postfix', 'mysql', 'all' or '--delete-days'"
+ exit 1
+fi
+
+if [[ -z ${BACKUP_LOCATION} ]]; then
+ while [[ -z ${BACKUP_LOCATION} ]]; do
+ read -ep "Backup location (absolute path, starting with /): " BACKUP_LOCATION
+ done
+fi
+
+if [[ ! ${BACKUP_LOCATION} =~ ^/ ]]; then
+ echo "Backup directory needs to be given as absolute path (starting with /)."
+ exit 1
+fi
+
+if [[ -f ${BACKUP_LOCATION} ]]; then
+ echo "${BACKUP_LOCATION} is a file!"
+ exit 1
+fi
+
+if [[ ! -d ${BACKUP_LOCATION} ]]; then
+ echo "${BACKUP_LOCATION} is not a directory"
+ read -p "Create it now? [y|N] " CREATE_BACKUP_LOCATION
+ if [[ ! ${CREATE_BACKUP_LOCATION,,} =~ ^(yes|y)$ ]]; then
+ exit 1
+ else
+ mkdir -p ${BACKUP_LOCATION}
+ chmod 755 ${BACKUP_LOCATION}
+ fi
+else
+ if [[ ${1} == "backup" ]] && [[ -z $(echo $(stat -Lc %a ${BACKUP_LOCATION}) | grep -oE '[0-9][0-9][5-7]') ]]; then
+ echo "${BACKUP_LOCATION} is not write-able for others, that's required for a backup."
+ exit 1
+ fi
+fi
+
+BACKUP_LOCATION=$(echo ${BACKUP_LOCATION} | sed 's#/$##')
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+COMPOSE_FILE=${SCRIPT_DIR}/../docker-compose.yml
+ENV_FILE=${SCRIPT_DIR}/../.env
+if [ ! -f ${COMPOSE_FILE} ]; then
+ echo "Compose file not found"
+ exit 1
+fi
+
+if [ ! -f ${ENV_FILE} ]; then
+ echo "Environment file not found"
+ exit 1
+fi
+
+echo "Using ${BACKUP_LOCATION} as backup/restore location."
+echo
+
+source ${SCRIPT_DIR}/../mailcow.conf
+
+if [[ -z ${COMPOSE_PROJECT_NAME} ]]; then
+ echo "Could not determine compose project name"
+ exit 1
+else
+ echo "Found project name ${COMPOSE_PROJECT_NAME}"
+ CMPS_PRJ=$(echo ${COMPOSE_PROJECT_NAME} | tr -cd "[0-9A-Za-z-_]")
+fi
+
+function backup() {
+ DATE=$(date +"%Y-%m-%d-%H-%M-%S")
+ mkdir -p "${BACKUP_LOCATION}/mailcow-${DATE}"
+ chmod 755 "${BACKUP_LOCATION}/mailcow-${DATE}"
+ cp "${SCRIPT_DIR}/../mailcow.conf" "${BACKUP_LOCATION}/mailcow-${DATE}"
+ while (( "$#" )); do
+ case "$1" in
+ aitrainer|all)
+ docker run --name mailcow-backup --rm \
+ -v ${BACKUP_LOCATION}/mailcow-${DATE}:/backup:z \
+ -v $(docker volume ls -qf name=^${CMPS_PRJ}_vmail-vol-1$):/vmail:ro,z \
+ ${DEBIAN_DOCKER_IMAGE} /bin/tar --warning='no-file-ignored' --use-compress-program="gzip --rsyncable" -Pcvpf /backup/backup_vmail.tar.gz /vmail
+ ;;&
+ crypt|all)
+ docker run --name mailcow-backup --rm \
+ -v ${BACKUP_LOCATION}/mailcow-${DATE}:/backup:z \
+ -v $(docker volume ls -qf name=^${CMPS_PRJ}_crypt-vol-1$):/crypt:ro,z \
+ ${DEBIAN_DOCKER_IMAGE} /bin/tar --warning='no-file-ignored' --use-compress-program="gzip --rsyncable" -Pcvpf /backup/backup_crypt.tar.gz /crypt
+ ;;&
+ redis|all)
+ docker exec $(docker ps -qf name=redis-mailcow) redis-cli save
+ docker run --name mailcow-backup --rm \
+ -v ${BACKUP_LOCATION}/mailcow-${DATE}:/backup:z \
+ -v $(docker volume ls -qf name=^${CMPS_PRJ}_redis-vol-1$):/redis:ro,z \
+ ${DEBIAN_DOCKER_IMAGE} /bin/tar --warning='no-file-ignored' --use-compress-program="gzip --rsyncable" -Pcvpf /backup/backup_redis.tar.gz /redis
+ ;;&
+ rspamd|all)
+ docker run --name mailcow-backup --rm \
+ -v ${BACKUP_LOCATION}/mailcow-${DATE}:/backup:z \
+ -v $(docker volume ls -qf name=^${CMPS_PRJ}_rspamd-vol-1$):/rspamd:ro,z \
+ ${DEBIAN_DOCKER_IMAGE} /bin/tar --warning='no-file-ignored' --use-compress-program="gzip --rsyncable" -Pcvpf /backup/backup_rspamd.tar.gz /rspamd
+ ;;&
+ postfix|all)
+ docker run --name mailcow-backup --rm \
+ -v ${BACKUP_LOCATION}/mailcow-${DATE}:/backup:z \
+ -v $(docker volume ls -qf name=^${CMPS_PRJ}_postfix-vol-1$):/postfix:ro,z \
+ ${DEBIAN_DOCKER_IMAGE} /bin/tar --warning='no-file-ignored' --use-compress-program="gzip --rsyncable" -Pcvpf /backup/backup_postfix.tar.gz /postfix
+ ;;&
+ mysql|all)
+ SQLIMAGE=$(grep -iEo '(mysql|mariadb)\:.+' ${COMPOSE_FILE})
+ if [[ -z "${SQLIMAGE}" ]]; then
+ echo "Could not determine SQL image version, skipping backup..."
+ shift
+ continue
+ else
+ echo "Using SQL image ${SQLIMAGE}, starting..."
+ docker run --name mailcow-backup --rm \
+ --network $(docker network ls -qf name=^${CMPS_PRJ}_mailcow-network$) \
+ -v $(docker volume ls -qf name=^${CMPS_PRJ}_mysql-vol-1$):/var/lib/mysql/:ro,z \
+ -t --entrypoint= \
+ --sysctl net.ipv6.conf.all.disable_ipv6=1 \
+ -v ${BACKUP_LOCATION}/mailcow-${DATE}:/backup:z \
+ ${SQLIMAGE} /bin/sh -c "mariabackup --host mysql --user root --password ${DBROOT} --backup --rsync --target-dir=/backup_mariadb ; \
+ mariabackup --prepare --target-dir=/backup_mariadb ; \
+ chown -R 999:999 /backup_mariadb ; \
+ /bin/tar --warning='no-file-ignored' --use-compress-program='gzip --rsyncable' -Pcvpf /backup/backup_mariadb.tar.gz /backup_mariadb ;"
+ fi
+ ;;&
+ --delete-days)
+ shift
+ if [[ "${1}" =~ ^[0-9]+$ ]]; then
+ find ${BACKUP_LOCATION}/mailcow-* -maxdepth 0 -mmin +$((${1}*60*24)) -exec rm -rvf {} \;
+ else
+ echo "Parameter of --delete-days is not a number."
+ fi
+ ;;
+ esac
+ shift
+ done
+}
+function restore() {
+ echo
+ echo "Stopping watchdog-mailcow..."
+ docker stop $(docker ps -qf name=watchdog-mailcow)
+ echo
+ RESTORE_LOCATION="${1}"
+ shift
+ while (( "$#" )); do
+ case "$1" in
+ vmail)
+ docker stop $(docker ps -qf name=dovecot-mailcow)
+ docker run -it --name mailcow-backup --rm \
+ -v ${RESTORE_LOCATION}:/backup:z \
+ -v $(docker volume ls -qf name=^${CMPS_PRJ}_vmail-vol-1$):/vmail:z \
+ ${DEBIAN_DOCKER_IMAGE} /bin/tar -Pxvzf /backup/backup_vmail.tar.gz
+ docker start $(docker ps -aqf name=dovecot-mailcow)
+ echo
+ echo "In most cases it is not required to run a full resync, you can run the command printed below at any time after testing wether the restore process broke a mailbox:"
+ echo
+ echo "docker exec $(docker ps -qf name=dovecot-mailcow) doveadm force-resync -A '*'"
+ echo
+ read -p "Force a resync now? [y|N] " FORCE_RESYNC
+ if [[ ${FORCE_RESYNC,,} =~ ^(yes|y)$ ]]; then
+ docker exec $(docker ps -qf name=dovecot-mailcow) doveadm force-resync -A '*'
+ else
+ echo "OK, skipped."
+ fi
+ ;;
+ redis)
+ docker stop $(docker ps -qf name=redis-mailcow)
+ docker run -it --name mailcow-backup --rm \
+ -v ${RESTORE_LOCATION}:/backup:z \
+ -v $(docker volume ls -qf name=^${CMPS_PRJ}_redis-vol-1$):/redis:z \
+ ${DEBIAN_DOCKER_IMAGE} /bin/tar -Pxvzf /backup/backup_redis.tar.gz
+ docker start $(docker ps -aqf name=redis-mailcow)
+ ;;
+ crypt)
+ docker stop $(docker ps -qf name=dovecot-mailcow)
+ docker run -it --name mailcow-backup --rm \
+ -v ${RESTORE_LOCATION}:/backup:z \
+ -v $(docker volume ls -qf name=^${CMPS_PRJ}_crypt-vol-1$):/crypt:z \
+ ${DEBIAN_DOCKER_IMAGE} /bin/tar -Pxvzf /backup/backup_crypt.tar.gz
+ docker start $(docker ps -aqf name=dovecot-mailcow)
+ ;;
+ rspamd)
+ docker stop $(docker ps -qf name=rspamd-mailcow)
+ docker run -it --name mailcow-backup --rm \
+ -v ${RESTORE_LOCATION}:/backup:z \
+ -v $(docker volume ls -qf name=^${CMPS_PRJ}_rspamd-vol-1$):/rspamd:z \
+ ${DEBIAN_DOCKER_IMAGE} /bin/tar -Pxvzf /backup/backup_rspamd.tar.gz
+ docker start $(docker ps -aqf name=rspamd-mailcow)
+ ;;
+ postfix)
+ docker stop $(docker ps -qf name=postfix-mailcow)
+ docker run -it --name mailcow-backup --rm \
+ -v ${RESTORE_LOCATION}:/backup:z \
+ -v $(docker volume ls -qf name=^${CMPS_PRJ}_postfix-vol-1$):/postfix:z \
+ ${DEBIAN_DOCKER_IMAGE} /bin/tar -Pxvzf /backup/backup_postfix.tar.gz
+ docker start $(docker ps -aqf name=postfix-mailcow)
+ ;;
+ mysql|mariadb)
+ SQLIMAGE=$(grep -iEo '(mysql|mariadb)\:.+' ${COMPOSE_FILE})
+ if [[ -z "${SQLIMAGE}" ]]; then
+ echo "Could not determine SQL image version, skipping restore..."
+ shift
+ continue
+ elif [ ! -f "${RESTORE_LOCATION}/mailcow.conf" ]; then
+ echo "Could not find the corresponding mailcow.conf in ${RESTORE_LOCATION}, skipping restore."
+ echo "If you lost that file, copy the last working mailcow.conf file to ${RESTORE_LOCATION} and restart the restore process."
+ shift
+ continue
+ else
+ read -p "mailcow will be stopped and the currently active mailcow.conf will be modified to use the DB parameters found in ${RESTORE_LOCATION}/mailcow.conf - do you want to proceed? [Y|n] " MYSQL_STOP_MAILCOW
+ if [[ ${MYSQL_STOP_MAILCOW,,} =~ ^(no|n|N)$ ]]; then
+ echo "OK, skipped."
+ shift
+ continue
+ else
+ echo "Stopping mailcow..."
+ docker-compose -f ${COMPOSE_FILE} --env-file ${ENV_FILE} down
+ fi
+ #docker stop $(docker ps -qf name=mysql-mailcow)
+ if [[ -d "${RESTORE_LOCATION}/mysql" ]]; then
+ docker run --name mailcow-backup --rm \
+ -v $(docker volume ls -qf name=^${CMPS_PRJ}_mysql-vol-1$):/var/lib/mysql/:rw,z \
+ --entrypoint= \
+ -v ${RESTORE_LOCATION}/mysql:/backup:z \
+ ${SQLIMAGE} /bin/bash -c "shopt -s dotglob ; /bin/rm -rf /var/lib/mysql/* ; rsync -avh --usermap=root:mysql --groupmap=root:mysql /backup/ /var/lib/mysql/"
+ elif [[ -f "${RESTORE_LOCATION}/backup_mysql.gz" ]]; then
+ docker run \
+ -it --name mailcow-backup --rm \
+ -v $(docker volume ls -qf name=^${CMPS_PRJ}_mysql-vol-1$):/var/lib/mysql/:z \
+ --entrypoint= \
+ -u mysql \
+ -v ${RESTORE_LOCATION}:/backup:z \
+ ${SQLIMAGE} /bin/sh -c "mysqld --skip-grant-tables & \
+ until mysqladmin ping; do sleep 3; done && \
+ echo Restoring... && \
+ gunzip < backup/backup_mysql.gz | mysql -uroot && \
+ mysql -uroot -e SHUTDOWN;"
+ elif [[ -f "${RESTORE_LOCATION}/backup_mariadb.tar.gz" ]]; then
+ docker run --name mailcow-backup --rm \
+ -v $(docker volume ls -qf name=^${CMPS_PRJ}_mysql-vol-1$):/backup_mariadb/:rw,z \
+ --entrypoint= \
+ -v ${RESTORE_LOCATION}:/backup:z \
+ ${SQLIMAGE} /bin/bash -c "shopt -s dotglob ; \
+ /bin/rm -rf /backup_mariadb/* ; \
+ /bin/tar -Pxvzf /backup/backup_mariadb.tar.gz"
+ fi
+ echo "Modifying mailcow.conf..."
+ source ${RESTORE_LOCATION}/mailcow.conf
+ sed -i --follow-symlinks "/DBNAME/c\DBNAME=${DBNAME}" ${SCRIPT_DIR}/../mailcow.conf
+ sed -i --follow-symlinks "/DBUSER/c\DBUSER=${DBUSER}" ${SCRIPT_DIR}/../mailcow.conf
+ sed -i --follow-symlinks "/DBPASS/c\DBPASS=${DBPASS}" ${SCRIPT_DIR}/../mailcow.conf
+ sed -i --follow-symlinks "/DBROOT/c\DBROOT=${DBROOT}" ${SCRIPT_DIR}/../mailcow.conf
+ source ${SCRIPT_DIR}/../mailcow.conf
+ echo "Starting mailcow..."
+ docker-compose -f ${COMPOSE_FILE} --env-file ${ENV_FILE} up -d
+ #docker start $(docker ps -aqf name=mysql-mailcow)
+ fi
+ ;;
+ esac
+ shift
+ done
+ echo
+ echo "Starting watchdog-mailcow..."
+ docker start $(docker ps -aqf name=watchdog-mailcow)
+}
+
+if [[ ${1} == "backup" ]]; then
+ backup ${@,,}
+elif [[ ${1} == "restore" ]]; then
+ i=1
+ declare -A FOLDER_SELECTION
+ if [[ $(find ${BACKUP_LOCATION}/mailcow-* -maxdepth 1 -type d 2> /dev/null| wc -l) -lt 1 ]]; then
+ echo "Selected backup location has no subfolders"
+ exit 1
+ fi
+ for folder in $(ls -d ${BACKUP_LOCATION}/mailcow-*/); do
+ echo "[ ${i} ] - ${folder}"
+ FOLDER_SELECTION[${i}]="${folder}"
+ ((i++))
+ done
+ echo
+ input_sel=0
+ while [[ ${input_sel} -lt 1 || ${input_sel} -gt ${i} ]]; do
+ read -p "Select a restore point: " input_sel
+ done
+ i=1
+ echo
+ declare -A FILE_SELECTION
+ RESTORE_POINT="${FOLDER_SELECTION[${input_sel}]}"
+ if [[ -z $(find "${FOLDER_SELECTION[${input_sel}]}" -maxdepth 1 \( -type d -o -type f \) -regex ".*\(redis\|rspamd\|mariadb\|mysql\|crypt\|vmail\|postfix\).*") ]]; then
+ echo "No datasets found"
+ exit 1
+ fi
+
+ echo "[ 0 ] - all"
+ # find all files in folder with *.gz extension, print their base names, remove backup_, remove .tar (if present), remove .gz
+ FILE_SELECTION[0]=$(find "${FOLDER_SELECTION[${input_sel}]}" -maxdepth 1 \( -type d -o -type f \) \( -name '*.gz' -o -name 'mysql' \) -printf '%f\n' | sed 's/backup_*//' | sed 's/\.[^.]*$//' | sed 's/\.[^.]*$//')
+ for file in $(ls -f "${FOLDER_SELECTION[${input_sel}]}"); do
+ if [[ ${file} =~ vmail ]]; then
+ echo "[ ${i} ] - Mail directory (/var/vmail)"
+ FILE_SELECTION[${i}]="vmail"
+ ((i++))
+ elif [[ ${file} =~ crypt ]]; then
+ echo "[ ${i} ] - Crypt data"
+ FILE_SELECTION[${i}]="crypt"
+ ((i++))
+ elif [[ ${file} =~ redis ]]; then
+ echo "[ ${i} ] - Redis DB"
+ FILE_SELECTION[${i}]="redis"
+ ((i++))
+ elif [[ ${file} =~ rspamd ]]; then
+ echo "[ ${i} ] - Rspamd data"
+ FILE_SELECTION[${i}]="rspamd"
+ ((i++))
+ elif [[ ${file} =~ postfix ]]; then
+ echo "[ ${i} ] - Postfix data"
+ FILE_SELECTION[${i}]="postfix"
+ ((i++))
+ elif [[ ${file} =~ mysql ]] || [[ ${file} =~ mariadb ]]; then
+ echo "[ ${i} ] - SQL DB"
+ FILE_SELECTION[${i}]="mysql"
+ ((i++))
+ fi
+ done
+ echo
+ input_sel=-1
+ while [[ ${input_sel} -lt 0 || ${input_sel} -gt ${i} ]]; do
+ read -p "Select a dataset to restore: " input_sel
+ done
+ echo "Restoring ${FILE_SELECTION[${input_sel}]} from ${RESTORE_POINT}..."
+ restore "${RESTORE_POINT}" ${FILE_SELECTION[${input_sel}]}
+fi
\ No newline at end of file
diff --git a/backup_aitrainer.sh b/backup_aitrainer.sh
new file mode 100644
index 0000000..ddbe039
--- /dev/null
+++ b/backup_aitrainer.sh
@@ -0,0 +1,79 @@
+#!/usr/bin/env bash
+
+if [[ ! -z ${MAILCOW_BACKUP_LOCATION} ]]; then
+ BACKUP_LOCATION="${MAILCOW_BACKUP_LOCATION}"
+fi
+
+if [[ -z ${BACKUP_LOCATION} ]]; then
+ while [[ -z ${BACKUP_LOCATION} ]]; do
+ read -ep "Backup location (absolute path, starting with /): " BACKUP_LOCATION
+ done
+fi
+
+if [[ ! ${1} =~ (backup|restore) ]]; then
+ echo "First parameter needs to be 'backup' or 'restore'"
+ exit 1
+fi
+
+if [[ ${1} == "backup" && ! ${2} =~ (aitrainer|all|--delete-days) ]]; then
+ echo "Second parameter needs to be 'aitrainer', 'all' or '--delete-days'"
+ exit 1
+fi
+
+BACKUP_LOCATION=$(echo ${BACKUP_LOCATION} | sed 's#/$##')
+SCRIPT_DIR=/home/bosi/backoffice/aitrainer_backoffice
+COMPOSE_FILE=${SCRIPT_DIR}/docker-compose.yml
+ENV_FILE=${SCRIPT_DIR}/.env
+if [ ! -f ${COMPOSE_FILE} ]; then
+ echo "Compose file not found"
+ exit 1
+fi
+
+echo "Using ${BACKUP_LOCATION} as backup/restore location."
+echo
+
+function backup() {
+ DATE=$(date +"%Y-%m-%d-%H-%M-%S")
+ CMPS_PRJ=aitrainer_backoffice
+ mkdir -p "${BACKUP_LOCATION}/aitrainer-${DATE}"
+ chmod 755 "${BACKUP_LOCATION}/aitrainer-${DATE}"
+ #cp "${SCRIPT_DIR}/../mailcow.conf" "${BACKUP_LOCATION}/mailcow-${DATE}"
+ while (( "$#" )); do
+ case "$1" in
+ gitlab|all)
+ docker exec -t gitlab-backup create
+ ;;&
+ aitrainer_db|all)
+ SQLIMAGE=$(grep -iEo '(mysql|mariadb)\:.+' ${COMPOSE_FILE})
+ if [[ -z "${SQLIMAGE}" ]]; then
+ echo "Could not determine SQL image version, skipping backup..."
+ shift
+ continue
+ else
+ echo "Using SQL image ${SQLIMAGE}, starting..."
+ docker exec aitrainer-backup sh -c 'exec mysqldump --all-databases -uroot -p${MYSQL_ROOT_PASSWORD}' > ${BACKUP_LOCATION}/all-databases.sql
+
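+ # TODO: the mysqldump invocation below still uses mariabackup options (--backup, --rsync,
+ # --target-dir, --prepare) carried over from mailcow's backup.sh; mysqldump does not support them.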
+ #-v $(docker volume ls -qf name=^aitrainer_backoffice_mysql-vol-1$):/var/lib/mysql/:ro,z
+ #-t --entrypoint=
+ #--sysctl net.ipv6.conf.all.disable_ipv6=1
+ docker run --name aitrainer-backup --rm \
+ -v ${BACKUP_LOCATION}/aitrainer-${DATE}:/backup:z \
+ ${SQLIMAGE} /bin/sh -c "mysqldump --host mysql-server --user root --password ${MYSQL_ROOT_PASSWORD} --backup --rsync --target-dir=/backup_mysql ; \
+ mysqldump --prepare --target-dir=/backup_mysql ; \
+ chown -R 999:999 /backup_mysql ; \
+ /bin/tar --warning='no-file-ignored' --use-compress-program='gzip --rsyncable' -Pcvpf /backup/backup_mysql.tar.gz /backup_mysql ;"
+ ;;&
+ fi
+ --delete-days)
+ shift
+ if [[ "${1}" =~ ^[0-9]+$ ]]; then
+ find ${BACKUP_LOCATION}/aitrainer-* -maxdepth 0 -mmin +$((${1}*60*24)) -exec rm -rvf {} \;
+ else
+ echo "Parameter of --delete-days is not a number."
+ fi
+ ;;
+ esac
+ shift
+ done
+}
+
+if [[ ${1} == "backup" ]]; then
+ backup ${@,,}
+fi
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
index a0fe58d..55259f0 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,7 +1,7 @@
version: "3.7"
services:
-
+
uvdesk:
image: dietermartens/uvdesk
environment:
@@ -13,12 +13,14 @@ services:
- APP_CURRENCY=EUR
- CRON_USER1=root
- CRON_SCHEDULE1=*/5 * * * *
- # - CRON_COMMAND1=cd /usr/local/uvdesk && php bin/console uvdesk:refresh-mailbox info@domain.tld support@domain.tld
+ - CRON_COMMAND1=cd /var/www/html && php /var/www/html/bin/console uvdesk:refresh-mailbox service@workouttest.com
restart: always
+ #extra_hosts:
+ # - "{uvdesk.aitrainer.info}:127.0.0.1"
volumes:
- uvdesk:/usr/local/uvdesk
ports:
- - "7089:89"
+ - "7089:80"
web:
build:
@@ -37,8 +39,7 @@ services:
- GOOGLE_APPLICATION_CREDENTIALS=/aitrainer_backoffice/aitrainer_backoffice/aitrainer_backoffice/asset/aitrainer-firebase-adminsdk.json
- WORKOUTTEST_SETTING=PROD
command: gunicorn aitrainer_backoffice.aitrainer_backoffice.wsgi --env DJANGO_SETTINGS_MODULE=aitrainer_backoffice.aitrainer_backoffice.settings.prod --bind 0.0.0.0:8000 --workers 3
-
- cronjobs:
+ cronjobs:
build:
context: .
dockerfile: ./Dockerfile
@@ -46,7 +47,7 @@ services:
- GOOGLE_APPLICATION_CREDENTIALS=/aitrainer_backoffice/aitrainer_backoffice/aitrainer_backoffice/asset/aitrainer-firebase-adminsdk.json
- WORKOUTTEST_SETTING=DEPLOY
- DJANGO_KEY=${DJANGO_KEY}
- command: ["cron", "-f"]
+ command: ["cron", "-f"]
mysql-server:
image: mysql:8.0.21
@@ -58,23 +59,17 @@ services:
- /home/bosi/backoffice/aitrainer_backoffice/mysqlconf:/etc/mysql/conf.d
ports:
- "33060:3306"
- networks:
- - redis_tier
phpmyadmin:
image: phpmyadmin/phpmyadmin:latest
restart: always
environment:
PMA_HOST: mysql-server
- PMA_USER: $MYSQL_USER
PMA_PASSWORD: $MYSQL_ROOT_PASSWORD
ports:
- "8085:80"
volumes:
- - ./htpasswd:/etc/phpmyadmin/.htpasswd
- ./phpmyadmin.config.php:/etc/phpmyadmin/config.user.inc.php
- networks:
- - redis_tier
wordpress:
image: wordpress
@@ -84,7 +79,6 @@ services:
- mysql-server:mysql
depends_on:
- mysql-server
- - redis
ports:
- "8090:80"
environment:
@@ -107,19 +101,8 @@ services:
source: /usr/local/bin/wordpress/wp_htaccess
consistency: cached
- /usr/local/bin/wordpress/wp-content/uploads:/var/www/html/wp-content/uploads:cached
- networks:
- - redis_tier
- redis:
- image: redis
- environment:
- - REDIS_URL=redis://redis
- ports:
- - '6379:6379'
- networks:
- - redis_tier
-
- mautic:
+ mautic:
image: mautic/mautic:v3
container_name: mautic
links:
@@ -136,22 +119,6 @@ services:
- MAUTIC_DB_PASSWORD=$MYSQL_ROOT_PASSWORD
- MAUTIC_DB_NAME=mautic
- MAUTIC_RUN_CRON_JOBS=true
- networks:
- - redis_tier
-
- api:
- # container_name: api_server
- image: api_server:latest
- build: .
- depends_on:
- - mysql-server
- restart: always
- working_dir: /aitrainer_server
- environment:
- SPRING_DATASOURCE_URL: jdbc:mysql://mysql-server:3306/aitrainer?serverTimezone=CET&useSSL=false&characterEncoding=UTF-8&allowPublicKeyRetrieval=true&allowMultiQueries=true
- command: bash -c "SERVER_PORT=8888 java -jar aitrainer_server.jar --spring.profiles.active=prod"
- ports:
- - "8888:8888"
api_test_ssl:
image: api_server_ssl_test:latest
@@ -193,19 +160,12 @@ services:
DB_NAME: wiki
restart: unless-stopped
ports:
- - "3000:3000"
- networks:
- - redis_tier
+ - "3000:3000"
volumes:
mysql-data-backoffice:
media:
- static:
+ static:
wordpress:
mautic_data:
- uvdesk:
- redis_data:
-
-networks:
- redis_tier:
- driver: bridge
\ No newline at end of file
+ uvdesk:
\ No newline at end of file