diff --git a/backup.sh b/backup.sh index 3142cc319acdada74320b309f9bb469aba791756..c9ddb85ed1dfdfbec8b7b0527994d86ec65af599 100755 --- a/backup.sh +++ b/backup.sh @@ -177,6 +177,7 @@ EOFbackupinfo if [ $bStart -eq 1 ]; then + j_notify "Start" "Starting a backup run..." _j_runHooks "100-before-backup" sleep 3 @@ -194,6 +195,7 @@ EOFbackupinfo "${DIR_SELF}"/localdump.sh ALL | tee -a "$JOB_LOGFILE" rcBackup=$? + test $rcBackup -gt 0 && j_notify "db dumps" "rc=$rcBackup" $rcBackup echo "INFO: $(date) - local backups were finished" | tee -a "$JOB_LOGFILE" echo @@ -260,6 +262,7 @@ EOFbackupinfo # -------------------------------------------------------------------------------- + j_notify "Done" "Backup was finished. rc=$rcBackup. See log for details: $JOB_LOGFILE" $rcBackup exit $rcBackup diff --git a/check_clientbackup.sh b/check_clientbackup.sh index 031f6ee6fd400b3f398df7c10d3fb57dba41fdd2..3baa29e84e0773aac2c3eff53db90a4de44aea97 100755 --- a/check_clientbackup.sh +++ b/check_clientbackup.sh @@ -150,11 +150,14 @@ else fi echo ">>> Summary of database backup actions:" - cat $logfile | grep "__[A-Z][A-Z]*__" | grep '__DB__' | filterColor + cat $logfile | grep '__DB__' | filterColor | cut -f 1 -d ' ' | sort -u | while read -r dbprefix + do + ( grep "$dbprefix SKIP" $logfile || grep "$dbprefix " $logfile ) | cut -f 2- -d ':' + done echo echo ">>> Summary of transfer actions:" - cat $logfile | grep "__[A-Z][A-Z]*__" | grep -E '__(BACKUP|PRUNE|REPO|VERIFY)__' | filterColor + cat $logfile | grep -E '__(BACKUP|PRUNE|REPO|VERIFY)__' | filterColor echo sShort="$sShort - OK: $iOK ... Errors: $iError" diff --git a/docs/10_Features.md b/docs/10_Features.md index 1927ee4eea64a81342d61cc79e6d2f085fb91ab5..e61798465bfc236e8bed779d8fca4acfc84d7c69 100644 --- a/docs/10_Features.md +++ b/docs/10_Features.md @@ -144,12 +144,14 @@ See [Hooks](50_Hooks/_index.md) ## Restic ## -* creates one initial full backup - and then never again. 
+* creates one initial full backup - and then never again (then it starts incremental backups only). * encrypts data * deduplicates files * delete backups by rules to keep a count of hourly, daily, weekly, mothly, yearly backups * several backup targets (currently supported protocols: sftp:// https:// and file://) * Single binary (written in Go) +* MS Windows: Backup uses volume shadow copy service (vss) +* Linux: with Fuse you can mount the backup repository and browse through all snapshots ## Duplicity ## diff --git a/docs/15_Requirements.md b/docs/15_Requirements.md new file mode 100644 index 0000000000000000000000000000000000000000..69ed3394b880126f80436cef6d60891403a5ba92 --- /dev/null +++ b/docs/15_Requirements.md @@ -0,0 +1,14 @@ +# What you need to run: + +* GNU core utilities + * Linux: On a basic linux installation it will run "out of the box" + * MS Windows: You need a set of GNU tools. + * Cygwin: https://cygwin.com/ + * MINGW: https://www.mingw-w64.org/ +* Bash + * MS Windows: You can configure .sh files to be opened with bash +* Restic + * see the Docs https://restic.readthedocs.io/en/latest/020_installation.html + * Get the binary from your package manager or https://github.com/restic/restic/releases + * if you don't have the binary in a directory of $PATH you can add it in the environment of the backup. See Configuration -> File env. + \ No newline at end of file diff --git a/docs/20_Installation.md b/docs/20_Installation.md index a4a287e665ef3fa979b8584d6794c5d0361ea3fb..3e624c5622cc6c0b008d27de2f0bf82d5ce0bd53 100644 --- a/docs/20_Installation.md +++ b/docs/20_Installation.md @@ -9,7 +9,7 @@ ## Installation with git clone -The most easy way is to use git pull. +The most easy way to install it and keep it up to date is to use `git clone|pull`. If you don't have / want to use git see the next chapter "Manual installation: uncompress archive". 
As user **root**: diff --git a/docs/30_Configuration/50_File_dirs.job.md b/docs/30_Configuration/50_File_dirs.job.md index a31ac0448b7bb207b5642ae9e997485a40b1ca1b..6c7fcf70c854a25732bd895771bd97acb751c0ce 100644 --- a/docs/30_Configuration/50_File_dirs.job.md +++ b/docs/30_Configuration/50_File_dirs.job.md @@ -78,6 +78,21 @@ because of a wrong exclude pattern. `exclude = /tmp/.*` +# Hints for MS Windows # + +To define folders to backup you should not use the bash unix like notation with forward slash eg. `/c/your/path`. +There seems to be a bug in restic to show snapshots on MS Windows written with /. +https://github.com/restic/restic/issues/2397 + +Use the backslash - and quote it, eg: + +```txt +include = c:\\backup +include = c:\\inetpub +include = c:\\scripts +include = c:\\Users +``` + ## TODO: custom single dirs with includes and exludes ## TODO. diff --git a/docs/40_Usage/10_Backup.md b/docs/40_Usage/10_Backup.md index 290950dbb39c2b5a4d3e88ef3ff0ceee23808e11..d9f8b989b26ca804cb7b34d474831dcf2a66f088 100644 --- a/docs/40_Usage/10_Backup.md +++ b/docs/40_Usage/10_Backup.md @@ -1,4 +1,20 @@ -``` +## Typical Usage + +This is the main script to start a backup. The script *backup.sh* is the one to add as a cronjob. It will start + +* the dumps of local databases +* the file transfer to an external repository + +Both of these steps can be started separately (see next pages). + +## How to start + +* on Linux: as a non root user use sudo `sudo ./backup.sh` +* on MS Windows: start it as user `./backup.sh`. Or double click it in a file manager (e.g. Explorer). The first time it asks what program should be used to open *.sh files - select the bash.exe or navigate to it. + +## Help + +```txt # ./backup.sh -? 
___ ___ ___ ___ _______ __ diff --git a/docs/40_Usage/20_Database.md b/docs/40_Usage/20_Database.md index 35ced633e166d039d1b44930780291f8228f8dca..daf43898a4235a267500ae86c84276cdea17573a 100644 --- a/docs/40_Usage/20_Database.md +++ b/docs/40_Usage/20_Database.md @@ -1,6 +1,9 @@ -## Make database dumps ## +## Make database dumps -To create backup dumps we use `./localdump.sh` +To create backup database dumps without transfer of local directory to a backup target use `sudo ./localdump.sh`. +Backup dumps will be stored as gzip files into `/var/iml-backup/[service]`. + +## Help ```text SYNTAX: @@ -20,7 +23,7 @@ ldap mysql pgsql sqlite -```text +``` If you have local Mysql daemon or Pgsql you can test it by starting @@ -43,7 +46,7 @@ To dump schemes of a specific database type add the name of a known service. sudo ./localdump.sh mysql ``` -## Structure in the backup folder ## +## Structure in the backup folder In the database dump folder is a subdir per service `/var/iml-backup/[service]`. @@ -54,7 +57,7 @@ All dumps are gzip compressed. At the end of a backup task with localdump.sh older files older than *keep-days* will be deleted from `/var/iml-backup/[service]`. -### Backup sqlite ### +### Backup sqlite Keep in mind that you need to define sqlite databases in jobs/backup-dbfiles.job first. @@ -70,10 +73,11 @@ In the folder /var/iml-backup/sqlite/ it creates 2 files per database * the gzip compressed dump (filename is full path with replacing `/` by `_`) * a .META file that contains the original full path for restore -## Restore database dumps ## +## Restore database dumps Remark: -To make a database restore its dump must be located at this directory. +To make a database restore its dump must be located at this directory: +`/var/iml-backup/[service]` The value keepdays contains number of days how long to keep dumps locally. 
If your dump to restore is older than this given range then you need diff --git a/docs/40_Usage/30_Filetransfer.md b/docs/40_Usage/30_Filetransfer.md index aeac6cc4a32a8091e62ea983f9b069dbfa73752b..f127b808ede2388464555388ef2db84c73898ead 100644 --- a/docs/40_Usage/30_Filetransfer.md +++ b/docs/40_Usage/30_Filetransfer.md @@ -1,4 +1,8 @@ -``` +## Transfer files ## + +To transfer local directories to a backup repository use `sudo ./transfer.sh`. + +```txt > ./transfer.sh -h ___ ___ ___ ___ _______ __ diff --git a/docs/40_Usage/40_Cronjob.md b/docs/40_Usage/40_Cronjob.md index 3e71eb40a5fc770f1fc4142f9ed28a971e0929ef..2149ef2998be4b8fc6c79b43d6a7f8b141502a3e 100644 --- a/docs/40_Usage/40_Cronjob.md +++ b/docs/40_Usage/40_Cronjob.md @@ -44,3 +44,18 @@ $ cat /etc/cron.d/client-backup 7 18 * * 6 root /usr/local/bin/cronwrapper.sh 1440 /opt/imlbackup/client/prune.sh 'iml-backup-prune' 7 18 * * 0 root /usr/local/bin/cronwrapper.sh 1440 /opt/imlbackup/client/verify.sh 'iml-backup-verify' ``` + +## Desktop notification + +If you don't run the backup on a server but on a client you can activate the desktop notification for a user that is logged in. +Set a variable `SUPER_USER=<USERNAME>` in your cron file. 
+ +Example: + +```shell +$ cat /etc/cron.d/client-backup + +SUPER_USER=axel + +*/17 * * * * root /usr/local/bin/cronwrapper.sh 1440 /opt/imlbackup/client/backup.sh 'iml-backup' +``` diff --git a/jobhelper.sh b/jobhelper.sh index d413db4805b6960279b378b270847d0f22779bc3..02519c458938d0e9be33385cf8954192f8930c86 100755 --- a/jobhelper.sh +++ b/jobhelper.sh @@ -14,6 +14,7 @@ # 2018-02-13 ah,ds v1.3 detect samba shares based on a flag # 2022-10-07 ah v1.4 unescape regex with space to prevent "grep: warning: stray \ before white space" # 2023-03-17 ah v1.5 ignore required user on MS windows; source jobs/env if it exists; use varaiable FQDN +# 2023-04-12 ah v1.6 add desktop notification # ================================================================================ @@ -82,6 +83,11 @@ function j_init(){ j_requireBinary "tee" j_requireBinary "touch" + # for notify-send in j_notify() + if [ -n "$SUDO_USER" ]; then + export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/$(id -u $SUDO_USER)/bus + fi + # j_read } @@ -419,6 +425,30 @@ function _getFqdn(){ # echo "INFO: FQDN is [$FQDN]" } +# show a desktop notification using notify-send +# param string summary (aka title) +# param string message text +# param integer optional: exitcode; if set it adds a prefix OK or ERROR on summary and sets urgency on error +function j_notify(){ + local _summary="IML BACKUP :: $1" + local _body="$( date +%H:%M:%S ) $2" + local _rc="$3" + + local _urgency="normal" + + if [ -n "$DBUS_SESSION_BUS_ADDRESS" ]; then + if [ -n "$_rc" ]; then + if [ "$_rc" = "0" ]; then + _summary="OK: ${_summary}" + else + _summary="ERROR: ${_summary}" + _urgency="critical" + fi + fi + su "$SUDO_USER" -c "notify-send --urgency=${_urgency} '${_summary}' '${_body}'" + fi +} + # ------------------------------------------------------------ # check if it was startet with a given user # This is skipped if MS windows was detected with "mingw". 
diff --git a/localdump.sh b/localdump.sh index a9f20d1681a6bbc283fde956ecd000fe2b1c339b..f9d1b11f2ae569a3a7116e8df613c3337d79bb40 100755 --- a/localdump.sh +++ b/localdump.sh @@ -357,6 +357,7 @@ _j_runHooks "200-before-db-service" . $BACKUP_SCRIPT $mode + test $rc -gt 0 && j_notify "db ${SERVICENAME}" "$BACKUP_SCRIPT $mode was finished with rc=$rc" $rc _j_runHooks "230-after-db-service" "$rc" # ----- post jobs: cleanup diff --git a/plugins/localdump/pgsql.sh b/plugins/localdump/pgsql.sh index fc4848d26f4b5153f50bcf06363cac5435548829..4aa4b547f36caaea6abdad406cdb5e10dd05e5c7 100755 --- a/plugins/localdump/pgsql.sh +++ b/plugins/localdump/pgsql.sh @@ -125,7 +125,7 @@ function restoreByFile(){ checkRequirements >/dev/null 2>&1 if [ $SERVICEFOUND -eq 0 ]; then - echo "__DB__$SERVICENAME INFO: service [$SERVICENAME] is not avilable on this machine." + echo "__DB__$SERVICENAME SKIP: service [$SERVICENAME] is not available on this machine." fi case $1 in diff --git a/plugins/localdump/sqlite.sh b/plugins/localdump/sqlite.sh index 64e755ab5a14853da234837160244e2f16a1a2b7..b6bbb85af6558c0848229e4656b0a31d433aaf1f 100755 --- a/plugins/localdump/sqlite.sh +++ b/plugins/localdump/sqlite.sh @@ -31,6 +31,10 @@ FILEDEFS=${DIR_JOBS}/backup-dbfiles.job # make sqlite3 backups of all sqlite = ... in backup-dbfiles.job function doSqliteBackup(){ + if ! _j_getvar ${FILEDEFS} "sqlite" | grep . ; then + echo "__DB__$SERVICENAME SKIP: no entries found for sqlite." + return 0 + fi create_targetdir @@ -56,17 +60,9 @@ function doSqliteBackup(){ # echo -n " to $TARGET " sqlite3 "$DATABASE_FILE" .dump >"${TARGET}" fetchrc >/dev/null - db._compressDumpfile "${TARGET}" - - # $myrc is last returncode - set in fetchrc - # if [ $myrc -eq 0 ]; then - # echo -n "gzip ... 
" - compress_file "${TARGET}" - echo "$DATABASE_FILE" >"${META}" - else - echo "ERROR occured - no gzip" - fi - # ls -l ${TARGET}* + db._compressDumpfile "${TARGET}" && echo "$DATABASE_FILE" >"${META}" + + ls -l ${TARGET}* fi fi diff --git a/transfer.sh b/transfer.sh index 8443e42d6f224962eb9ebba2ef6dfd2548b64bcc..fed5c4231cd7a4130a058f84de62c9d41f512fcd 100755 --- a/transfer.sh +++ b/transfer.sh @@ -406,6 +406,8 @@ function setAction(){ echo t_rcCheckBackup $myrc "${BACKUP_DIR}" + test $myrc -ne 0 && j_notify "Dir ${BACKUP_DIR}" "Backup for ${BACKUP_DIR} failed with rc=$myrc. See log for details: $JOB_LOGFILE" 1 + _j_runHooks "320-after-folder-transfer" "$myrc" fi @@ -451,6 +453,7 @@ function setAction(){ touch "${lastprunefile}" else rc+=1 + j_notify "Prune" "Pruning old data in the repository failed." 1 fi ls -l "${lastprunefile}" echo @@ -466,6 +469,7 @@ function setAction(){ touch "${lastverifyfile}" else rc+=1 + j_notify "Verify" "Verify of repository data failed." 1 fi ls -l "${lastverifyfile}" echo