diff --git a/backup.sh b/backup.sh
new file mode 100755
index 0000000000000000000000000000000000000000..e4bcfabdf07088b08d95c4d46b979e7aa01ab959
--- /dev/null
+++ b/backup.sh
@@ -0,0 +1,218 @@
+#!/bin/bash
+# ================================================================================
+#
+# START BACKUP
+#
+# For a backup, the following will be done:
+#   - local dumps for all database types will be performed
+#   - transfer of data directories will be started
+#
+# This script should be added as cronjob (see /etc/cron.d/)
+#
+# --------------------------------------------------------------------------------
+# ah - Axel Hahn <axel.hahn@iml.unibe.ch>
+#
+# 2022-01-14  ah  v1.0
+# ================================================================================
+
+. $( dirname "$0" )/jobhelper.sh
+
+
+  typeset -i rcBackup=0
+  typeset -i rcTransfer=0
+  bStart=1
+
+
+# --------------------------------------------------------------------------------
+# functions
+# --------------------------------------------------------------------------------
+
+function showHelp(){
+    self=$(basename "$0")
+    cat << EOFhelp
+
+START A BACKUP
+
+This script starts 
+  - the database backups and creates dumps locally
+  - the file backup (using restic or duplicity)
+
+SYNTAX:
+
+    $self [OPTIONS]
+
+    default: no parameter:
+
+OPTIONS:
+
+    -d, --dryrun  just show infos, do not start a backup
+    -h, --help    Show this help
+
+    The following parameters force the backup type (if using duplicity):
+    -a, --auto    force auto 
+    -o, --older   duplicity type=auto: repeat full backup if last full 
+                  backup is older than ... like "1M" for 1 month
+    -f, --full    force full backup (if supported)
+    -i, --inc     force incremental backup
+
+EOFhelp
+}
+
+# --------------------------------------------------------------------------------
+# checks
+# --------------------------------------------------------------------------------
+
+
+
+  # --- read schedulerdata
+  j_read
+
+
+while [ "$#" -gt 0 ]; 
+do
+
+    case "$1" in
+        -h|-\?|--help)
+            echo "Param: show help"
+            showHelp
+            exit 0
+            ;;
+
+        -d|--dryrun)
+            echo "Param: setting to DRY RUN"
+            bStart=0
+            ;;
+        -a|--auto)
+            echo "Param: setting to AUTO"
+            JOBDEF_TYPE=auto
+            bStart=1
+            ;;
+        -o|--older)
+            echo "Param: setting to AUTO on duplicity"
+            echo "$2" | grep "^[1-9][0-9]*[a-zA-Z]$" 2>/dev/null
+            if [ $? -ne 0 ]; then
+                echo "ERROR: the value after $1 must be a number and a letter (for unit); i.e. 1M for 1 month"
+                exit 1
+            else
+                JOBDEF_TYPE=auto
+                JOBDEF_AUTO=$2
+                shift 1
+            fi
+            ;;
+        -f|--full)
+            echo "Param: setting to FULL"
+            JOBDEF_TYPE=full
+            JOBDEF_AUTO=
+            bStart=1
+            ;;
+        -i|--inc)
+            echo "Param: setting to INC"
+            JOBDEF_TYPE=inc
+            JOBDEF_AUTO=
+            bStart=1
+            ;;
+        *)
+            echo "ERROR: parameter [$1] is unknown"
+            showHelp
+            exit 1 
+    esac
+    shift 1
+
+  done
+
+  # update logfile after param overrides
+  _j_setLogfile
+
+  # show infos
+  cfg_type=$(_j_getvar "${STORAGEFILE}" "type")
+  cfg_full=$(_j_getvar "${STORAGEFILE}" "full")
+  cfg_startfull=$(_j_getvar "${STORAGEFILE}" "start-time-full")
+  
+  test -z "${cfg_full}${cfg_startfull}" || cfg_info="INFO: Type is [auto]; ignoring config for full backup: [full = $cfg_full ... start-time-full = $cfg_startfull]"
+
+  JOBDEF_STARTTIME=$(date +%H%M)
+  STORAGE_BIN=$( _j_getvar "${STORAGEFILE}" "bin" )
+
+  cat << EOFbackupinfo
+
+    CONFIG:
+    Used Backup tool : $STORAGE_BIN
+    Backup type      : $(_j_getvar ${STORAGEFILE} "type")
+      $( test "$cfg_type" = "auto" && echo "$cfg_info" )$( test "$cfg_type" = "auto" || echo "INFO: full backup: $cfg_info" )
+
+    EFFECTIVE:
+    type             : $JOBDEF_TYPE $JOBDEF_AUTO
+    donefile         : $JOB_DONEFILE
+    log              : $JOB_LOGFILE
+
+EOFbackupinfo
+
+  j_requireUser "root"
+
+# --------------------------------------------------------------------------------
+# start backup
+# --------------------------------------------------------------------------------
+
+  if [ $bStart -eq 1 ]; then
+
+    sleep 3
+
+    # ----- local dumps
+
+    echo "INFO: $(date) - starting backup ... type $JOBDEF_TYPE - time $JOBDEF_STARTTIME " | tee -a "$JOB_LOGFILE"
+    touch "${JOB_DONEFILE}.01.START"
+    cat "$JOBFILE" >>"$JOB_LOGFILE"
+
+
+    echo "INFO: $(date) - Making local backups ... ${DIR_SELF}/localdump.sh ALL" | tee -a "$JOB_LOGFILE"
+    "${DIR_SELF}"/localdump.sh ALL | tee -a "$JOB_LOGFILE"
+    rcBackup=$?
+    echo "INFO: $(date) - local backups were finished" | tee -a "$JOB_LOGFILE"
+
+    sleep 2
+
+    # ----- transfer
+
+    if [ -x "${DIR_SELF}/transfer.sh" ]; then
+
+      # transfer files
+      cat "$STORAGEFILE" >>"$JOB_LOGFILE"
+      echo "INFO: $(date) - Sending data to storage... ${DIR_SELF}/transfer.sh $JOBDEF_TYPE" | tee -a "$JOB_LOGFILE"
+      touch "${JOB_DONEFILE}.02.STARTTRANSFER"
+      "${DIR_SELF}"/transfer.sh $JOBDEF_TYPE "$JOBDEF_AUTO" 2>&1 | tee -a "$JOB_LOGFILE"
+      rcTransfer=$?
+
+    else
+      echo "SKIP - transfer.sh not found; all files will be kept on local filesystem only" | tee -a "$JOB_LOGFILE"
+    fi
+
+    rcBackup=$rcBackup+rcTransfer
+
+
+    echo "INFO: $(date) - DONE" | tee -a "$JOB_LOGFILE"
+    touch "${JOB_DONEFILE}.03.DONE"
+
+    echo
+    echo "log for this executed job is"
+    ls -l "$JOB_LOGFILE"
+    echo
+
+    echo "INFO: $(date) - cleanup logs"
+    echo find "${DIR_LOGS}" -mtime +28 -delete -print
+    find "${DIR_LOGS}" -mtime +28 -delete -print
+    echo
+
+    echo STATUS $0 exit with final returncode rc=$rcBackup | tee -a "$JOB_LOGFILE"
+    echo
+
+  else
+    echo "Nothing to do."
+  fi
+
+# --------------------------------------------------------------------------------
+
+
+  exit $rcBackup
+
+
+# --------------------------------------------------------------------------------
diff --git a/backup.sh.banner b/backup.sh.banner
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/check_clientbackup.sh b/check_clientbackup.sh
index d01480d140157d25e2b9933f7a16de59b5959a81..1343cf30d36e43d91df09ff9f3f451d41716a1e7 100755
--- a/check_clientbackup.sh
+++ b/check_clientbackup.sh
@@ -12,23 +12,25 @@
 # ds - Daniel Schueler <daniel.schueler@iml.unibe.ch>
 #
 # 2016-12-09  ah,ds  v1.0
+# 2022-01-19  ah     v1.1  fixes with shellcheck
 # ==============================================================================
 
-. `dirname $0`/jobhelper.sh
+. $(dirname $0)/jobhelper.sh
 
 # ------------------------------------------------------------------------------
 # CONFIG
 # ------------------------------------------------------------------------------
 
 typeset -i iOK=0
-typeset -i iErr=0
-typeset -i rc=0
+
+# limit when to warn if no backup was started
+typeset -i iMaxAgeInHours=96
 
 typeset -i iAge
 typeset -i iAge2
 typeset -i iError
 
-logdir=`dirname $0`/logs
+logdir=$(dirname "$0")/logs
 
 
 # ------------------------------------------------------------------------------
@@ -37,23 +39,23 @@ logdir=`dirname $0`/logs
 
 sShort="Client Backup -"
 
-ls $logdir/*.log >/dev/null
+ls "$logdir"/*.log >/dev/null
 if [ $? -ne 0 ]; then
   sShort="$sShort logs were not found. Backup was never executed"
   iError=1
 else
 
-  iMaxAgeInHours=`j_getLastBackupAge`
-  echo verify last backup: $iMaxAgeInHours h ago
+  # iMaxAgeInHours=`j_getLastBackupAge`
+  # echo verify last backup: $iMaxAgeInHours h ago
 
-  for logfile in `ls -1t $logdir/inc*.log $logdir/full*.log $logdir/auto*.log 2>/dev/null | head -1`
+  for logfile in $(ls -1t "$logdir"/inc*.log "$logdir"/full*.log "$logdir"/auto*.log 2>/dev/null | head -1)
   do
-    echo LAST BACKUP: $logfile
-    grep "final.*rc=" $logfile
+    echo LAST BACKUP: "$logfile"
+    grep "final.*rc=" "$logfile"
     echo
 
     # --- SENSU Statusline
-    if [ -f `dirname $0`/transfer.sh ]; then
+    if [ -f "$(dirname $0)"/transfer.sh ]; then
       sShort="$sShort WITH"
     else
       sShort="$sShort NO"
@@ -61,8 +63,8 @@ else
     sShort="$sShort transfer -"
 
     # --- count returncodes
-    iOK=`grep "final.*rc=0$" $logfile | wc -l`
-    iError=`grep "final.*rc=" $logfile | grep -v "rc=0$" | wc -l`
+    iOK=$(grep -c "final.*rc=0$" "$logfile" )
+    iError=$(grep "final.*rc=" "$logfile" | grep -cv "rc=0$")
     echo "OK: $iOK ... Errors: $iError"
 
     # --- rc=0 must be here
@@ -72,10 +74,10 @@ else
     fi
 
     # --- check age
-    iAge=`date +%s`-`stat -c %Y $logfile`
+    iAge=$( date +%s )-$( date +%s -r "$logfile" )
     iAge2=$iAge/60/60
-    echo age: $iAge sec ... $iAge2 h
-    sShort="$sShort last backup log: `basename $logfile` ($iAge2 h ago) "
+    echo "age: $iAge sec ... $iAge2 h"
+    sShort="$sShort last backup log: $(basename "$logfile") ($iAge2 h ago) "
     if [ $iAge2 -gt $iMaxAgeInHours ]; then
       echo "Error: the last backup is older than $iMaxAgeInHours hours"
       sShort="$sShort ERROR: backup is older than $iMaxAgeInHours hours "
@@ -88,16 +90,16 @@ else
     # --- changes (incremental backups only) and backup status infos
     echo
     sSearch="Args:|ElapsedTime|SourceFiles|SourceFileSize|RawDeltaSize"
-    echo $logfile | fgrep "inc" >/dev/null
+    echo "$logfile" | grep -F "inc" >/dev/null
     if [ $? -eq 0 ]; then
       sSearch="[ADM]\ |${sSearch}"
     fi
     echo --- changes:
-    egrep "^(${sSearch})" $logfile
+    grep -E "^(${sSearch})" "$logfile"
 
     echo
     echo --- summary of backed up directories:
-    grep "DIR\ " $logfile
+    grep "DIR\ " "$logfile"
   
     sShort="$sShort - OK: $iOK ... Errors: $iError"
 
@@ -108,8 +110,8 @@ fi
 
 
 echo
-echo MONITORINFO: $sShort
-echo STATUS $0 - final returncode rc=$iError
-exit $iError
+echo "MONITORINFO: $sShort"
+echo "STATUS $0 - final returncode rc=$iError"
+exit "$iError"
 
 # ------------------------------------------------------------------------------
diff --git a/docs/30_Configuration/10_Database.md b/docs/30_Configuration/10_Database.md
index 92bd48ae938607b7f84e1fced7b8a8ec990943bb..ef1a17aab4e944b1f2bc1293c1163fe4fa3317d3 100644
--- a/docs/30_Configuration/10_Database.md
+++ b/docs/30_Configuration/10_Database.md
@@ -29,18 +29,18 @@ Below that one a directory for the service will be generated; inside that one th
 The number of days how long to keep dumps locally.
 
 Remark:
-To make a database restore its dump must be located at this directory. To restore an older database you need to restore the dump from duplicity first.
+To make a database restore its dump must be located at this directory.
 
-If you have local Mysql daemon or Pgsql you can test it by starting 
+To restore an older database you need to restore the dump from the file backup first.
+
+If you have local Mysql daemon or Pgsql you can test it by starting
 
 ```
 # dump all databases
 sudo ./localdump.sh
 ```
 
-
 ```
 # show written files
 find /var/iml-backup
 ```
-
diff --git a/docs/30_Configuration/10_backup.job.md b/docs/30_Configuration/10_backup.job.md
index 16c300e0e1c6ae118368bd7a6342391779b0dbf0..dbbb2d4bb371a313fe23d064c88eff9f63b741d2 100644
--- a/docs/30_Configuration/10_backup.job.md
+++ b/docs/30_Configuration/10_backup.job.md
@@ -2,10 +2,7 @@
 
 ```
 # ----------------------------------------------------------------------
-#
-# jobfile backup.job for scheduling
-#
-# ----------------------------------------------------------------------
+# jobfile backup.job for scheduling local dumps
 #
 # SYNTAX: 
 # [variable] = [value]
@@ -16,35 +13,39 @@
 #
 # ----------------------------------------------------------------------
 
+lang = en_us
+
+# ----------------------------------------------------------------------
+# backup type; one of auto|inc
+#   inc:   incremental and full backups if started on configured times
+#   auto:  automatic; = incremental backups; duplicity can start a full
+#          backup if the last one is older than a given range
+# ----------------------------------------------------------------------
+
 # type = inc
 type = auto
 
-lang = en_us
-
 # ----------------------------------------------------------------------
-# scheduling for incremental backup and full backup
+# scheduling for full backup
+# type = inc only
 # for inc and full you can set
 #   DOM:06,22        - days of month
 #   DOW:Mon,Fri      - weekdays
-#   WDM:1st Fri      - nth weekday in month
+#   WDM:1st Fri      - nth weekday in month
 # ----------------------------------------------------------------------
 
-inc = DOW:Mon,Tue,Wed,Thu,Fri,Sat,Sun
+# full = WDM:3rd Fri
 
-# full backups for type = inc
-full = WDM:3rd Fri
+# when to start a full backup?
+#   regex that must match a time HH:MM at some point
+#   This will start it if the backup is started between 23:00:00 and 23:09:59
+# start-time-full = 23:0.
 
-# full backups for type = auto
+# ----------------------------------------------------------------------
+# full backups for type = auto AND when using duplicity
+# ----------------------------------------------------------------------
 # auto = <time> i.e. 1M 
-auto = 1M
-
-# --- when to start every day?
-
-# incremental jobs
-#   time HHMM ... multiple values with separated with "," and sorted
-#   10:00,12:00,14:00,16:00,18:00
-start-time-inc = 20:00
-start-time-full = 20:00
+# auto = 1M
 
 
 # ----------------------------------------------------------------------
@@ -59,6 +60,7 @@ keep-days = 7
 dir-dbarchive = /var/localdumps/archive
 
 # ----------------------------------------------------------------------
+
 ```
 
 # Description #
@@ -101,7 +103,7 @@ see also **dir-localdumps**
 ## full ##
 
 Scheduling days for full backup
-For inc and full you can set
+You can set
 
 * day of month: DOM
 * weekday: DOW
@@ -121,16 +123,7 @@ Examples:
 
 `full = WDM:3rd Fri`
 
-See also: **inc**, **start-time-full**
-
-## inc ##
-
-Scheduling days for incremental backup.
-For valid values see description for **full**
-
-`inc = DOW:Mon,Tue,Wed,Thu,Fri,Sat,Sun`
-
-See also: **full**, **start-time-inc**
+See also: **start-time-full**
 
 ## lang ##
 
@@ -141,44 +134,128 @@ Language of the shell. Do not change to keep documented keywords for date and ti
 Time when to start an incrmental backup. This value will be handled
 if a day matches **full** only.
 
-Values are HHMM ... multiple values with separated with `,` and must be sorted.
+The value of start-time-full is a regex that must match a time HH:MM at some point
+to start a full backup.
 
-If a full time matches an incremental time then full has priority.
+This will start it if the backup is started between 23:00:00 and 23:09:59:
 
-`start-time-full= 22:00`
+`start-time-full= 23:0.`
 
 see also **full**
 
-## start-time-inc ##
-
-Time when to start an incrmental backup. This value will be handled
-if a day matches **inc** only.
-
-Values are HHMM ... multiple values with separated with `,` and must be sorted
-
-If a full time matches an incremental time then full has priority.
-
-`start-time-inc = 10:00,12:00,14:00,16:00,18:00`
+## type ##
 
-see also **inc**
+Type of backup. The value is one of auto|inc
 
-## type ##
+* inc:   incremental and full backups if started on configured times
+* auto:  automatic; = incremental backups; duplicity can start a full backup if the last one is older than a given range
 
-Type of backup.
+This setting depends on the possibilities of the used backup tool.
 
 Restic:
 
-* set to "auto"
+* set to "auto" only
 
 Duplicity:
 
+Duplicity supports 2 modes.
+
 * "inc" - make full and incremental backups defined times. You need more values:
   * full = WDM:3rd Fri
-  * inc = DOW:Mon,Tue,Wed,Thu,Fri,Sat,Sun
-  * start-time-full = 20:00
+  * start-time-full = 23:0.
 
 * "auto" - starts incremental backups as long last full backup is not older value in
   * auto = 1M
 
 `type = auto`
 
+see also **bin**
+
+## Example: Restic ##
+
+```text
+lang = en_us
+type = auto
+
+
+# ----------------------------------------------------------------------
+# local dumps; each service has its own subdir there
+# this entry is mapped to BACKUP_TARGETDIR in "localdump.sh"
+# ----------------------------------------------------------------------
+dir-localdumps = /var/localdumps/backup
+keep-days = 7
+
+# archive databases (to keep deleted schemes or databases that 
+# are not backupped anymore; couchdb2 feature)
+dir-dbarchive = /var/localdumps/archive
+
+```
+
+## Example: Duplicity ##
+
+### Full backups on a specified time ###
+
+This can be used on servers where you want to define when the full
+backup must be started.
+
+```text
+lang = en_us
+type = inc
+
+# ----------------------------------------------------------------------
+# scheduling for full backup
+# type = inc only
+# for inc and full you can set
+#   DOM:06,22        - days of month
+#   DOW:Mon,Fri      - weekdays
+#   WDM:1st Fri      - nth weekday in month
+# ----------------------------------------------------------------------
+
+full = WDM:3rd Fri
+
+# when to start a full backup?
+#   regex that must match a time HH:MM at some point
+#   This will start it if the backup is started between 23:00:00 and 23:09:59
+start-time-full = 23:0.
+
+
+# ----------------------------------------------------------------------
+# local dumps; each service has its own subdir there
+# this entry is mapped to BACKUP_TARGETDIR in "localdump.sh"
+# ----------------------------------------------------------------------
+dir-localdumps = /var/localdumps/backup
+keep-days = 7
+
+# archive databases (to keep deleted schemes or databases that 
+# are not backupped anymore; couchdb2 feature)
+dir-dbarchive = /var/localdumps/archive
+
+```
+
+### Full backups after a month ###
+
+This can be used on client backups. A full backup automatically starts
+if the last backup is older than a given time.
+
+```text
+lang = en_us
+type = auto
+
+# ----------------------------------------------------------------------
+# full backups for type = auto AND when using duplicity
+# ----------------------------------------------------------------------
+# auto = <time> i.e. 1M 
+auto = 1M
+
+
+# ----------------------------------------------------------------------
+# local dumps; each service has its own subdir there
+# this entry is mapped to BACKUP_TARGETDIR in "localdump.sh"
+# ----------------------------------------------------------------------
+dir-localdumps = /var/localdumps/backup
+keep-days = 7
+
+# archive databases (to keep deleted schemes or databases that 
+# are not backupped anymore; couchdb2 feature)
+dir-dbarchive = /var/localdumps/archive
+```
\ No newline at end of file
diff --git a/jobhelper.sh b/jobhelper.sh
index f1b66b6df88aa65aef9b4bdb375ea13d845252bf..130925715778b5817f529b28ba9c361648546d6f 100755
--- a/jobhelper.sh
+++ b/jobhelper.sh
@@ -19,17 +19,14 @@
 # CONFIG
 # ----------------------------------------------------------------------
 
-DIR_SELF=`dirname $0`
+DIR_SELF=$( dirname "$0" )
 
-DIR_JOBS=${DIR_SELF}/jobs
-DIR_LOGS=${DIR_SELF}/logs
-
-JOBFILE=${DIR_JOBS}/backup.job
-DIRFILE=${DIR_JOBS}/dirs.job
-STORAGEFILE=${DIR_JOBS}/transfer.job
-
-# RMQ_SERVER=
+DIR_JOBS="${DIR_SELF}/jobs"
+DIR_LOGS="${DIR_SELF}/logs"
 
+JOBFILE="${DIR_JOBS}/backup.job"
+DIRFILE="${DIR_JOBS}/dirs.job"
+STORAGEFILE="${DIR_JOBS}/transfer.job"
 
 
 # ----------------------------------------------------------------------
@@ -37,15 +34,6 @@ STORAGEFILE=${DIR_JOBS}/transfer.job
 # ----------------------------------------------------------------------
 
 
-# ------------------------------------------------------------
-# IDEA ONLY; UNUSED
-# fetch current job from queue and store it to job directory
-# ------------------------------------------------------------
-function j_fetch(){
-  echo TODO: fetch from RMQ_SERVER
-}
-
-
 # ------------------------------------------------------------
 # init function
 # ------------------------------------------------------------
@@ -53,17 +41,17 @@ function j_init(){
 
   j_banner
 
-  if [ ! -d ${DIR_LOGS} ]; then
-    mkdir -p ${DIR_LOGS} && echo "INFO: dir created ${DIR_LOGS}"
+  if [ ! -d "${DIR_LOGS}" ]; then
+    mkdir -p "${DIR_LOGS}" && echo "INFO: dir created ${DIR_LOGS}"
   fi
 
-  if [ ! -d ${DIR_JOBS} ]; then
+  if [ ! -d "${DIR_JOBS}" ]; then
     # mkdir -p ${DIR_JOBS} && echo "INFO: dir created ${DIR_JOBS}"
     echo "ERROR: missing jobs directory. Aborting."
     exit 1
   fi
 
-  for myfile in ${JOBFILE} ${DIRFILE} ${STORAGEFILE}
+  for myfile in "${JOBFILE}" "${DIRFILE}" "${STORAGEFILE}"
   do
     if [ ! -f "${myfile}" ]; then
       echo "WARNING: missing a config file: $myfile"
@@ -72,7 +60,7 @@ function j_init(){
   done
 
   # for date definitions like weekdays
-  JOBDEF_LANG=`_j_getvar ${JOBFILE} "lang"`
+  JOBDEF_LANG=$(_j_getvar "${JOBFILE}" "lang")
   if [ -z "$JOBDEF_LANG" ]; then
     JOBDEF_LANG="en_us"
   fi
@@ -112,24 +100,24 @@ eofbanner
 # get list of all directories to backup / restore
 # ------------------------------------------------------------
 function j_getDirs2Backup(){
-  STORAGE_SAMBASHARES=`_j_getvar ${STORAGEFILE} "sambashares"`
+  STORAGE_SAMBASHARES=$(_j_getvar "${STORAGEFILE}" "sambashares")
 
-  _j_getvar ${JOBFILE} dir-localdumps
-  _j_getvar ${JOBFILE} dir-dbarchive
-  _j_getvar ${DIRFILE} include
+  _j_getvar "${JOBFILE}" dir-localdumps
+  _j_getvar "${JOBFILE}" dir-dbarchive
+  _j_getvar "${DIRFILE}" include
 
   # get dirs of filesets, i.e.
   # set-custom-[key]--dir = /home/ladmin
-  _j_getvar ${DIRFILE} "set.*--dir"
+  _j_getvar "${DIRFILE}" "set.*--dir"
 
   # detect Samba shares (set sambashares = 1 for it)
-  if [ -z "${STORAGE_SAMBASHARES}" -o ${STORAGE_SAMBASHARES} -eq 0 ]; then
+  if [ -z "${STORAGE_SAMBASHARES}" ] || [ "${STORAGE_SAMBASHARES}" -eq 0 ]; then
     echo NO >/dev/null
   else
     if [ -f /etc/samba/smb.conf ]; then
-      for dirOfShare in `cat /etc/samba/smb.conf  | grep "path.*="  | grep -v "#.*path" | cut -f 2 -d "=" `
+      for dirOfShare in $( grep "path.*=" "/etc/samba/smb.conf" | grep -v "#.*path" | cut -f 2 -d "=" )
       do
-        echo $dirOfShare
+        echo "$dirOfShare"
       done
     fi
   fi
@@ -142,7 +130,7 @@ function j_getDirs2Backup(){
 # param  string  path
 # ------------------------------------------------------------
 function j_getSetnameOfPath(){
-  cat ${DIRFILE} | grep "^set.*dir\ =\ $*$" | cut -f 1 -d "=" | sed "s#\-\-dir\ ##g"
+  grep "^set.*dir\ =\ $*$" "${DIRFILE}" | cut -f 1 -d "=" | sed "s#\-\-dir\ ##g"
 }
 
 # ------------------------------------------------------------
@@ -152,42 +140,17 @@ function j_getSetnameOfPath(){
 # param  string  name of host (for future releases)
 # ------------------------------------------------------------
 function j_getFullTarget(){
-  sTmpSafeName=`j_getSafename "$1"`
+  sTmpSafeName=$(j_getSafename "$1")
   sTmpHostname=$2
-  if [ -z $sTmpHostname ]; then
-    sTmpHostname=`hostname -f`
+  if [ -z "$sTmpHostname" ]; then
+    sTmpHostname=$(hostname -f)
   fi
-  if [ -z ${STORAGE_BASEDIR} ]; then
-    STORAGE_BASEDIR=`_j_getvar ${STORAGEFILE} "storage"`
+  if [ -z "${STORAGE_BASEDIR}" ]; then
+    STORAGE_BASEDIR=$(_j_getvar "${STORAGEFILE}" "storage")
   fi
-  echo ${STORAGE_BASEDIR}/${sTmpHostname}/${sTmpSafeName}
+  echo "${STORAGE_BASEDIR}/${sTmpHostname}/${sTmpSafeName}"
 }
 
-# ------------------------------------------------------------
-# get minimal Age of last backup that had to run in hours
-# it returns a value between 24 and 96
-# ------------------------------------------------------------
-function j_getLastBackupAge(){
-  typeset -i sCmpDate
-  typeset -i iWasInc
-  typeset -i iWasFull
-  j_read
-
-  JOBDEF_INC=`_j_getvar ${JOBFILE} "inc"`
-  JOBDEF_FULL=`_j_getvar ${JOBFILE} "full"`
-
-  for iDeltaH in {24..96}
-  do
-    sCmpDate=`date +%s`-iDeltaH*60*60
-    iWasInc=`_j_wasThisDay @${sCmpDate} $JOBDEF_INC`
-    iWasFull=`_j_wasThisDay @${sCmpDate} $JOBDEF_FULL`
-    if [ ${iWasInc} -gt 0  -o  ${iWasFull} -gt 0 ]; then
-      echo $iDeltaH
-      exit
-    fi
-  done
-  echo $iDeltaH
-}
 
 # ------------------------------------------------------------
 # replace / to _ to get a save filename for a directory to
@@ -195,7 +158,8 @@ function j_getLastBackupAge(){
 # param  string  name of directory
 # ------------------------------------------------------------
 function j_getSafename(){
-  echo $* | sed 's#/#_#g'
+  # echo $* | sed 's#/#_#g'
+  echo "${*//\//_}"
 }
 
 # ------------------------------------------------------------
@@ -208,7 +172,7 @@ function _j_getvar(){
     echo "ERROR: cannot read file: ${1}. Abort."
     exit 100
   fi
-  cat "${1}" | grep "^${2}\ =\ " | cut -f 3- -d " "
+  grep "^${2}\ =\ " < "${1}"| cut -f 3- -d " "
 }
 
 # ------------------------------------------------------------
@@ -226,32 +190,40 @@ function _j_getvar(){
 # param  string  date to compare
 # param  string  value of full|inc in backup.job
 # ------------------------------------------------------------
-function _j_wasThisDay(){
-  typeset -i bToday=0
-  sCompDate="$1"
+function _j_runToday(){
+  typeset -i local bToday=0
+  local sCompDate="$1"
   shift 1
-  value="$*"
+  local value="$*"
 
   # grep weekday
-  echo $value | grep "^DOW:" | grep `date +%a -d $sCompDate` >/dev/null && bToday=1
+  echo "$value" | grep "^DOW:" | grep $(date +%a -d "$sCompDate") >/dev/null && bToday=1
 
   # grep day of month
-  echo $value | grep "^DOM:" | grep `date +%d -d $sCompDate` >/dev/null && bToday=1
+  echo "$value" | grep "^DOM:" | grep $(date +%d -d "$sCompDate") >/dev/null && bToday=1
 
   # grep nth weekday of a month
-  echo $value | grep "^WDM:" >/dev/null
+  echo "$value" | grep "^WDM:" >/dev/null
   if [ $? -eq 0 ]; then
 
-    typeset -i iDayOfMonth=`date +%e -d $sCompDate`
-    typeset -i iWeekday=`date +%u -d $sCompDate`
+    typeset -i local iDayOfMonth
+    iDayOfMonth=$(date +%e -d "$sCompDate")
+
+    typeset -i local iWeekday
+    iWeekday=$(date +%u -d "$sCompDate")
+    
     # `date +%u` - weekday as int; Sun = 0
     # `date +%e` - day in date
-    typeset -i iWeekInMonth=$(echo $(( ( ${iDayOfMonth} - ${iWeekday} + 6 ) / 7 )) )
+    typeset -i local iWeekInMonth
+    iWeekInMonth=$(echo $(( ( ${iDayOfMonth} - ${iWeekday} + 6 ) / 7 )) )
 
-    typeset -i n=`echo $value | grep "^WDM:" | cut -f 2- -d ":" | cut -c 1`
-    sDay=`echo $value | grep "^WDM:" | cut -f 2- -d ":" | cut -f 2 -d " "`
+    typeset -i local n
+    n=$(echo "$value" | grep "^WDM:" | cut -f 2- -d ":" | cut -c 1)
+    
+    local sDay
+    sDay=$(echo "$value" | grep "^WDM:" | cut -f 2- -d ":" | cut -f 2 -d " ")
 
-    if [ ${n} -eq ${iWeekInMonth} -a ${sDay} = `date +%a  -d $sCompDate` ]; then
+    if [ ${n} -eq ${iWeekInMonth} ] && [ ${sDay} = $(date +%a -d "$sCompDate") ]; then
         bToday=1
     fi
   fi
@@ -270,8 +242,8 @@ function _j_wasThisDay(){
 # param  string  value of full|inc in backup.job
 # ------------------------------------------------------------
 function _j_isToday(){
-  sCmpDate=`date +%s`
-  _j_wasThisDay "@$sCmpDate" $*
+  sCmpDate=$(date +%s)
+  _j_runToday "@$sCmpDate" "$*"
 }
 
 # ------------------------------------------------------------
@@ -279,57 +251,52 @@ function _j_isToday(){
 # ------------------------------------------------------------
 function j_read(){
 
-  mytime=`date +%H%M`
+  # mytime=$(date +%H%M)
 
   # --- parse something
 
-  BACKUP_TARGETDIR=`_j_getvar ${JOBFILE} "dir-local-dumps"`
+  BACKUP_TARGETDIR=$(_j_getvar "${JOBFILE}" "dir-local-dumps")
+  export BACKUP_TARGETDIR
 
-  JOBDEF_TYPE=`_j_getvar ${JOBFILE} "type"`
-  if [ -z "$JOBDEF_TYPE" ]; then
-    JOBDEF_TYPE=auto
-  fi
+  JOBDEF_TYPE=$(_j_getvar ${STORAGEFILE} "type")
+  export JOBDEF_TYPE
 
+  if [ "$JOBDEF_TYPE" != "auto" ]; then
 
-  JOBDEF_INC=`_j_getvar ${JOBFILE} "inc"`
-  JOBDEF_FULL=`_j_getvar ${JOBFILE} "full"`
-  JOBDEF_AUTO=`_j_getvar ${JOBFILE} "auto"`
-  if [ "$JOBDEF_TYPE" = "auto" ]; then
-    if [ -z "$JOBDEF_AUTO" ]; then
-      JOBDEF_AUTO=1W
-    fi
-  else
-    JOBDEF_AUTO=
-  fi
+    # ----- detect if current date matches a definition "full = ..."
+    local cfg_full;      cfg_full=$(_j_getvar "${STORAGEFILE}" "full")
+    local bIsTodayFull;  bIsTodayFull=$(_j_isToday "$cfg_full")
 
-  bIsTodayInc=`_j_isToday $JOBDEF_INC`
-  bIsTodayFull=`_j_isToday $JOBDEF_FULL`
+    # ... if "1" ... then verify time with "start-time-full = ...""
+    if [ $bIsTodayFull -eq 1 ]; then
 
+      local sStart
+      sStart=$(_j_getvar "${STORAGEFILE}" "start-time-full")
+      test -z "$sStart}" && sStart="."
+      if date +%H:%M | grep "$sStart" >/dev/null; then
+        JOBDEF_TYPE=full
+      fi
 
-  JOB_DOTODAY=1
-  if [ $bIsTodayFull -eq 0 -a $bIsTodayInc -eq 0 ]; then
-    JOB_DOTODAY=0
+    fi
   fi
 
+  if [ -z "$JOBDEF_TYPE" ]; then
+    JOBDEF_TYPE=auto
+  fi
 
-  sStartInc=`_j_fetchLatestStarttime "start-time-inc"`
-  JOBDEF_STARTTIME=$sStartInc
-
-  if [ $bIsTodayFull -eq 1 ]; then
-    sStartFull=`_j_fetchLatestStarttime "start-time-full"`
+  test "${JOBDEF_TYPE}" = "auto" && JOBDEF_AUTO=$(_j_getvar ${STORAGEFILE} "auto")
+  export JOBDEF_AUTO
 
-    if [ $bIsTodayInc -eq 1  -a  $sStartFull -ne $sStartInc ]; then
-      echo INFO: full backup today $sStartFull - but incremental is at $sStartInc
-      echo -n ""
-    else
-      # echo INFO: full backup today $sStartFull
-      JOBDEF_TYPE="full"
-      JOBDEF_STARTTIME=$sStartFull
-    fi
-  fi
+  _j_setLogfile
+}
 
-  JOB_DONEFILE=${DIR_LOGS}/${JOBDEF_TYPE}-`date +%Y%m%d`-${JOBDEF_STARTTIME}
+# ------------------------------------------------------------
+# read local jobdescription and set as variables
+# ------------------------------------------------------------
+function _j_setLogfile(){
+  JOB_DONEFILE=${DIR_LOGS}/${JOBDEF_TYPE}-$(date +%Y%m%d-%H%M%S)
   JOB_LOGFILE="${JOB_DONEFILE}.log"
+  export JOB_LOGFILE
 }
 
 # ------------------------------------------------------------
@@ -338,29 +305,29 @@ function j_read(){
 # * if empty: take value from start-time
 # param  string  one of start-time-inc|start-time-full
 # ------------------------------------------------------------
-function _j_fetchLatestStarttime(){
-  sLatest=
-  sStart=`_j_getvar ${JOBFILE} "$1" | sed "s#[\ \:\-]##g"`
-  for sTime in `echo $sStart | sed "s#,# #g"`
-  do
-    if [ -z $sLatest ]; then
-      sLatest=$sTime
-    fi
-    if [ $sTime -le `date +%H%M` ]; then
-      sLatest=$sTime
-    fi
-  done
-  if [ -z $sLatest ]; then
-    $sLatest=`_j_getvar ${JOBFILE} "start-time" | sed "s#[\ \:\-]##g"`
-  fi
-  if [ -z $sLatest ]; then
-    color error
-    echo ERROR: missing start time info for $1
-    color reset
-    exit 1
-  fi
-  echo $sLatest
-}
+# function _j_fetchLatestStarttime(){
+#   sLatest=
+#   sStart=`_j_getvar ${JOBFILE} "$1" | sed "s#[\ \:\-]##g"`
+#   for sTime in `echo $sStart | sed "s#,# #g"`
+#   do
+#     if [ -z $sLatest ]; then
+#       sLatest=$sTime
+#     fi
+#     if [ $sTime -le `date +%H%M` ]; then
+#       sLatest=$sTime
+#     fi
+#   done
+#   if [ -z $sLatest ]; then
+#     $sLatest=`_j_getvar ${JOBFILE} "start-time" | sed "s#[\ \:\-]##g"`
+#   fi
+#   if [ -z $sLatest ]; then
+#     color error
+#     echo ERROR: missing start time info for $1
+#     color reset
+#     exit 1
+#   fi
+#   echo $sLatest
+# }
 
 # ------------------------------------------------------------
 # check if a binary exists - and abort if not
@@ -374,7 +341,7 @@ function j_requireBinary(){
   if [ $rcself -ne 0 ]; then
     rc=$rc+$rcself
     echo "INFO: missing binary $1"
-    if [ -z $2 ]; then
+    if [ -z "$2" ]; then
       exit 3
     fi
   fi
@@ -387,12 +354,13 @@ function j_requireBinary(){
 # ------------------------------------------------------------
 function j_requireProcess(){
   # echo "CHECK process $1"
-  ps -ef | grep -v grep | grep -E "$1" >/dev/null
+  # ps -ef | grep -v grep | grep -E "$1" >/dev/null
+  pgrep -f "$1" >/dev/null  # -f matches the full command line, like the former ps -ef | grep
   rcself=$?
   if [ $rcself -ne 0 ]; then
     rc=$rc+$rcself
     echo "INFO: missing process $1"
-    if [ -z $2 ]; then
+    if [ -z "$2" ]; then
       exit 4
     fi
   fi
@@ -403,8 +371,8 @@ function j_requireProcess(){
 # param  string  username, i.e. root
 # ------------------------------------------------------------
 function j_requireUser(){
-  sUser=`id | cut -f 2 -d "(" | cut -f 1 -d ")"`
-  if [[ $sUser != "$1" ]]; then
+  sUser=$(id | cut -f 2 -d "(" | cut -f 1 -d ")")
+  if [[ "$sUser" != "$1" ]]; then
     echo "ERROR: user $1 is reqired."
     exit 5
   fi
diff --git a/jobs/backup.job.dist b/jobs/backup.job.dist
index 5da522fa9575bc4b4968177aad2652a39cad3d96..81f253b0d1973114bfb236146588bb85b68bff42 100644
--- a/jobs/backup.job.dist
+++ b/jobs/backup.job.dist
@@ -1,8 +1,5 @@
 # ----------------------------------------------------------------------
-#
-# jobfile backup.job for scheduling
-#
-# ----------------------------------------------------------------------
+# jobfile backup.job for scheduling local dumps
 #
 # SYNTAX: 
 # [variable] = [value]
@@ -13,36 +10,8 @@
 #
 # ----------------------------------------------------------------------
 
-# type = inc
-type = auto
-
 lang = en_us
 
-# ----------------------------------------------------------------------
-# scheduling for incremental backup and full backup
-# for inc and full you can set
-#   DOM:06,22        - days of month
-#   DOW:Mon,Fri      - weekdays
-#   WDM:1st Fri      - nth weekday in month
-# ----------------------------------------------------------------------
-
-inc = DOW:Mon,Tue,Wed,Thu,Fri,Sat,Sun
-
-# full backups for type = inc
-full = WDM:3rd Fri
-
-# full backups for type = auto
-# auto = <time> i.e. 1M 
-auto = 1M
-
-# --- when to start every day?
-
-# incremental jobs
-#   time HHMM ... multiple values with separated with "," and sorted
-#   10:00,12:00,14:00,16:00,18:00
-start-time-inc = 20:00
-start-time-full = 20:00
-
 
 # ----------------------------------------------------------------------
 # local dumps; each service has its own subdir there
diff --git a/jobs/transfer.job.dist b/jobs/transfer.job.dist
index 5255a79ee66d2fcde093c1039b9d96d086d031fc..4e5dfbdad4cf3f1733b6df20e3d6cc56c0f3cae2 100644
--- a/jobs/transfer.job.dist
+++ b/jobs/transfer.job.dist
@@ -67,6 +67,37 @@ bin = restic
 # remark: "gnupg-passphrase = ..." is deprecated 
 passphrase = EnterYourSecretHere
 
+# ----------------------------------------------------------------------
+# backup type; one of auto|inc
+#   inc:   incremental and full backups if started on configured times
+#   auto:  automatic; = incremental backups; duplicity can start a full
+#          backup if the last one is older than a given range
+# ----------------------------------------------------------------------
+
+# type = inc
+type = auto
+
+# ----------------------------------------------------------------------
+# scheduling for full backup
+# type = inc only
+# for inc and full you can set
+#   DOM:06,22        - days of month
+#   DOW:Mon,Fri      - weekdays
+#   WDM:1st Fri      - nth weekday in month
+# ----------------------------------------------------------------------
+
+# full = WDM:3rd Fri
+
+# when to start a full backup?
+#   a regex that must match the current time HH:MM
+#   This will start a full backup if the backup is started between 23:00:00 and 23:09:59
+# start-time-full = 23:0.
+
+# ----------------------------------------------------------------------
+# full backups for type = auto AND when using duplicity
+# ----------------------------------------------------------------------
+# auto = <time> i.e. 1M 
+# auto = 1M
 
 # automatic backup of samba shares (only if samba shares were detected)
 sambashares = 1
diff --git a/plugins/localdump/couchdb.sh b/plugins/localdump/couchdb.sh
index 126a8d12daa10e0ec97c75e116ed7e6a5a12261a..fa35089d0f20ab057e4d103e14fa4498c3eb6e4b 100755
--- a/plugins/localdump/couchdb.sh
+++ b/plugins/localdump/couchdb.sh
@@ -10,10 +10,11 @@
 #
 # 2017-03-24  ah,ds  v0.9  backup
 # 2017-03-27  .....  v1.0  restore
+# 2022-01-20         v1.1  fixes with shellcheck
 # ================================================================================
 
-if [ -z $BACKUP_TARGETDIR ]; then
-  echo ERROR: you cannot start `basename $0` directly
+if [ -z "$BACKUP_TARGETDIR" ]; then
+  echo "ERROR: you cannot start $(basename "$0") directly"
   rc=$rc+1
   exit 1
 fi
@@ -41,10 +42,10 @@ function _couchapi(){
   # sParams="$sParams -u ${couchdbuser}:${couchdbpw}"
   sParams="$sParams -X ${method}"
   sParams="$sParams ${COUCHDB_URL}${apiurl}"
-  if [ ! -z $outfile ]; then
+  if [ ! -z "$outfile" ]; then
     sParams="$sParams -o ${outfile}"
   fi
-  curl $sParams 2>/dev/null
+  curl $sParams 2>/dev/null  # intentionally unquoted: sParams holds multiple arguments
 }
 
 function _couchGet(){
@@ -52,7 +53,7 @@ function _couchGet(){
 }
 
 function _getDblist(){
-   _couchGet _all_dbs | sed 's#\"#\n#g' | egrep -v "^(\[|\,|\])$"
+   _couchGet _all_dbs | sed 's#\"#\n#g' | grep -Ev "^(\[|\,|\])$"
 }
 
 
@@ -60,11 +61,10 @@ function _getDblist(){
 
 # get valid configured instances
 function getInstances(){
- for mycfg in `ls -1 ~/.iml_backup/couchdb/*.config`
+ for mycfg in ~/.iml_backup/couchdb/*.config
  do
-   . $mycfg
-   if [ $? -eq 0 ]; then
-     echo `basename "${mycfg}" | cut -f 1 -d "."`
+   if . "$mycfg"; then
+     echo $(basename "${mycfg}" | cut -f 1 -d ".")
    fi
  done
 }
@@ -75,25 +75,24 @@ function getInstances(){
 # param  string  name of the instance to load
 function loadInstance(){
   COUCHDB_URL=
-  . ~/.iml_backup/couchdb/${1}.config
-  if [ $? -ne 0 ]; then
+  if ! . "$HOME/.iml_backup/couchdb/${1}.config" ; then
     color error
     echo ERROR: invalid instance: $1 - the config file cannot be sourced
     color reset
     exit 1
   fi
-  if [ -z ${COUCHDB_URL} ]; then
+  if [ -z "${COUCHDB_URL}" ]; then
     color error
-    echo ERROR: invalid instance: $1 - the config file has no COUCHDB_URL
+    echo "ERROR: invalid instance: $1 - the config file has no COUCHDB_URL"
     color reset
     exit 1
   fi
 
   # parse ${COUCHDB_URL} ...
-  couchdbhost=`echo ${COUCHDB_URL} | cut -f 3 -d "/" | cut -f 2 -d "@" | cut -f 1 -d ":"`
-  couchdbport=`echo ${COUCHDB_URL} | cut -f 3 -d "/" | cut -f 2 -d "@" | cut -f 2 -d ":"`
-  couchdbuser=`echo ${COUCHDB_URL} | cut -f 3 -d "/" | cut -f 1 -d "@" | cut -f 1 -d ":"`
-  couchdbpw=`echo ${COUCHDB_URL} | cut -f 3 -d "/" | cut -f 1 -d "@" | cut -f 2 -d ":"`
+  couchdbhost=$(echo "${COUCHDB_URL}" | cut -f 3 -d "/" | cut -f 2 -d "@" | cut -f 1 -d ":")
+  couchdbport=$(echo "${COUCHDB_URL}" | cut -f 3 -d "/" | cut -f 2 -d "@" | cut -f 2 -d ":")
+  couchdbuser=$(echo "${COUCHDB_URL}" | cut -f 3 -d "/" | cut -f 1 -d "@" | cut -f 1 -d ":")
+  couchdbpw=$(  echo "${COUCHDB_URL}" | cut -f 3 -d "/" | cut -f 1 -d "@" | cut -f 2 -d ":")
 
 }
 
@@ -103,14 +102,12 @@ function loadInstance(){
 # backup with loop over instances
 function doBackup(){
   # for mycfg in `ls -1 ~/.iml_backup/couchdb/*.config`
-  for COUCHDB_INSTANCE in `getInstances`
+  for COUCHDB_INSTANCE in $(getInstances)
   do
-    loadInstance $COUCHDB_INSTANCE
+    loadInstance "$COUCHDB_INSTANCE"
 
-      echo --- instance: $COUCHDB_INSTANCE
-      curl --head -X GET $COUCHDB_URL 2>/dev/null | grep "^HTTP.*\ 200\ "
-
-      if [ $? -eq 0 ]; then
+      echo "--- instance: $COUCHDB_INSTANCE"
+      if curl --head -X GET "$COUCHDB_URL" 2>/dev/null | grep "^HTTP.*\ 200\ "; then
 
         _doBackupOfSingleInstance
 
@@ -119,12 +116,12 @@ function doBackup(){
         color error
         echo ERROR: couch DB instance is not available or canot be accessed with these credentials in config file
         # repeat curl to show the error message
-        curl -X GET $COUCHDB_URL
+        curl -X GET "$COUCHDB_URL"
         color reset
       fi
 
     echo
-    echo --- `date` done.
+    echo "--- $(date) done."
     echo
   done
 }
@@ -135,29 +132,29 @@ function doBackup(){
 function _doBackupOfSingleInstance(){
 
   create_targetdir
-  mkdir -p ${BACKUP_TARGETDIR}/${COUCHDB_INSTANCE} 2>/dev/null
+  mkdir -p "${BACKUP_TARGETDIR}/${COUCHDB_INSTANCE}" 2>/dev/null
 
   echo
   echo "    DUMP databases of instance ${COUCHDB_INSTANCE}"
   echo "    couchdbhost $couchdbhost on port $couchdbport with user $couchdbuser"
   echo
 
-  for dbname in `_getDblist`
+  for dbname in $(_getDblist)
   do
-    echo ----- `date` ${COUCHDB_INSTANCE} -- ${dbname}
-    OUTFILE=${BACKUP_TARGETDIR}/${COUCHDB_INSTANCE}/`get_outfile ${dbname}`.couchdbdump
-    python ${dirPythonPackages}/couchdb/tools/dump.py ${COUCHDB_URL}/${dbname} >${OUTFILE}
+    echo "----- $(date) ${COUCHDB_INSTANCE} -- ${dbname}"
+    OUTFILE=${BACKUP_TARGETDIR}/${COUCHDB_INSTANCE}/$(get_outfile "${dbname}").couchdbdump
+    python ${dirPythonPackages}/couchdb/tools/dump.py "${COUCHDB_URL}/${dbname}" >"${OUTFILE}"
 
     fetchrc
 
     # $myrc is last returncode - set in fetchrc
     if [ $myrc -eq 0 ]; then
       echo -n "gzip ... "
-      compress_file $OUTFILE
+      compress_file "$OUTFILE"
     else
       echo "ERROR occured - no gzip"
     fi
-    ls -l $OUTFILE*
+    ls -l "$OUTFILE"*
     echo
   done
 
@@ -173,10 +170,10 @@ function restoreByFile(){
   echo
   h2 "analyze dump $sMyfile"
 
-  COUCHDB_INSTANCE=`echo $sMyfile | sed "s#${BACKUP_TARGETDIR}##g" | sed "s#\./##g" | sed "s#^/##g" | cut -f 1 -d "/"`
+  COUCHDB_INSTANCE=$(echo "$sMyfile" | sed "s#${BACKUP_TARGETDIR}##g" | sed "s#\./##g" | sed "s#^/##g" | cut -f 1 -d "/")
   echo "detected COUCHDB_INSTANCE   : [${COUCHDB_INSTANCE}]"
-  if [ -z $sMyDb ]; then
-    sMyDb=`guessDB $sMyfile`
+  if [ -z "$sMyDb" ]; then
+    sMyDb=$(guessDB "$sMyfile")
     echo "detected db schema from file: [${sMyDb}]"
   else
     echo "db schema from param 2: [${sMyDb}]"
@@ -184,14 +181,13 @@ function restoreByFile(){
 
   echo
 
-  loadInstance $COUCHDB_INSTANCE
+  loadInstance "$COUCHDB_INSTANCE"
 
-  echo connect $couchdbhost on port $couchdbport with user $couchdbuser
-  curl --head -X GET $COUCHDB_URL 2>/dev/null | grep "^HTTP.*\ 200\ " >/dev/null
-  if [ $? -ne 0 ]; then
+  echo "connect $couchdbhost on port $couchdbport with user $couchdbuser"
+  if ! curl --head -X GET "$COUCHDB_URL" 2>/dev/null | grep "^HTTP.*\ 200\ " >/dev/null; then
     color error
     echo ERROR: couch DB instance is not available
-    curl -X GET $COUCHDB_URL
+    curl -X GET "$COUCHDB_URL"
     color reset
     exit 1
   fi
@@ -207,21 +203,21 @@ function restoreByFile(){
   #   echo DB exists ... need to drop it first
   # fi
 
-  h2 deleting database [$sMyDb] ...
+  h2 "deleting database [$sMyDb] ..."
   color cmd
-  _couchapi DELETE $sMyDb
+  _couchapi DELETE "$sMyDb"
   fetchrc
   color reset
 
-  h2 creating database [$sMyDb] ...
+  h2 "creating database [$sMyDb] ..."
   color cmd
-  _couchapi PUT $sMyDb
+  _couchapi PUT "$sMyDb"
   fetchrc
   color reset
 
   h2 import file ...
   color cmd
-  zcat ${sMyfile} | python ${dirPythonPackages}/couchdb/tools/load.py $COUCHDB_URL/$sMyDb
+  zcat "${sMyfile}" | python ${dirPythonPackages}/couchdb/tools/load.py "$COUCHDB_URL/$sMyDb"
   fetchrc
   color reset
   echo
@@ -276,6 +272,6 @@ else
 fi
 
 
-echo $0 $* [couchdb] final returncode rc=$rc
+echo "$0 $* [couchdb] final returncode rc=$rc"
 
 # --------------------------------------------------------------------------------
diff --git a/plugins/localdump/couchdb2.sh b/plugins/localdump/couchdb2.sh
index 8ed3092003e2f816878b9c1499806e122bd631fe..358c162caf15c0e4c7262879ec0e9abe9aa905d5 100755
--- a/plugins/localdump/couchdb2.sh
+++ b/plugins/localdump/couchdb2.sh
@@ -15,10 +15,11 @@
 #                          ./localdump.sh backup couchdb2 demo
 # 2021-10-11  .....  v1.2  added fastmode in restore: no test connect, do not 
 #                          delete DB before create request
+# 2022-01-20         v1.3  fixes with shellcheck
 # ================================================================================
 
-if [ -z $BACKUP_TARGETDIR ]; then
-  echo ERROR: you cannot start `basename $0` directly
+if [ -z "$BACKUP_TARGETDIR" ]; then
+  echo "ERROR: you cannot start $(basename "$0") directly"
   rc=$rc+1
   exit 1
 fi
@@ -33,7 +34,7 @@ CFGDIR=~/.iml_backup/couchdb2
 # UNUSED
 # dirPythonPackages=/usr/lib/python2.7/site-packages
 
-ARCHIVE_DIR=`_j_getvar ${JOBFILE} dir-dbarchive`/couchdb2
+ARCHIVE_DIR=$(_j_getvar "${JOBFILE}" dir-dbarchive)/couchdb2
 
 # --------------------------------------------------------------------------------
 # FUNCTIONS
@@ -51,20 +52,20 @@ function _couchapi(){
   # sParams="$sParams -u ${couchdbuser}:${couchdbpw}"
   sParams="$sParams -X ${method}"
   sParams="$sParams ${COUCH_URL}${apiurl}"
-  if [ ! -z $outfile ]; then
+  if [ ! -z "$outfile" ]; then
     sParams="$sParams -o ${outfile}"
   fi
-  curl $sParams 2>/dev/null
+  curl $sParams 2>/dev/null  # intentionally unquoted: sParams holds multiple arguments
 }
 
 function _getDblist(){
-   _couchapi GET _all_dbs | sed 's#\"#\n#g' | egrep -v "^(\[|\,|\])$" | grep -v _replicator | grep -v _global_changes
+   _couchapi GET _all_dbs | sed 's#\"#\n#g' | grep -Ev "^(\[|\,|\])$" | grep -v _replicator | grep -v _global_changes
 }
 
 # get value update_seq of given couchdb name
 function _getDbSeq(){
   # _couchapi GET $1 | sed 's#,\"#\n"#g' | egrep -v "^(\[|\,|\])$" | grep update_seq | cut -f 4 -d '"'
-  _couchapi GET $1 | sed 's#,\"#\n"#g' | egrep -v "^(\[|\,|\])$" | grep update_seq | cut -f 4 -d '"' | cut -f 1 -d '-'
+  _couchapi GET "$1" | sed 's#,\"#\n"#g' | grep -Ev "^(\[|\,|\])$" | grep update_seq | cut -f 4 -d '"' | cut -f 1 -d '-'
 }
 
 
@@ -72,11 +73,10 @@ function _getDbSeq(){
 
 # get valid configured instances
 function getInstances(){
- for mycfg in `ls -1 ${CFGDIR}/*${1}*.config`
+ for mycfg in "${CFGDIR}"/*"${1}"*.config
  do
-   . $mycfg
-   if [ $? -eq 0 ]; then
-     echo `basename "${mycfg}" | cut -f 1 -d "."`
+   if . "$mycfg"; then
+     echo $(basename "${mycfg}" | cut -f 1 -d ".")
    fi
  done
 }
@@ -87,16 +87,15 @@ function getInstances(){
 # param  string  name of the instance to load
 function loadInstance(){
   COUCH_URL=
-  . ${CFGDIR}/${1}.config
-  if [ $? -ne 0 ]; then
+  if ! . "${CFGDIR}/${1}.config"; then
     color error
     echo ERROR: invalid instance: $1 - the config file cannot be sourced
     color reset
     exit 1
   fi
-  if [ -z ${COUCH_URL} ]; then
+  if [ -z "${COUCH_URL}" ]; then
     color error
-    echo ERROR: invalid instance: $1 - the config file has no COUCH_URL
+    echo "ERROR: invalid instance: $1 - the config file has no COUCH_URL"
     color reset
     exit 1
   fi
@@ -110,14 +109,12 @@ function loadInstance(){
 # param 1  string  globbing filter to config files
 function doBackup(){
   # for mycfg in `ls -1 ~/.iml_backup/couchdb/*.config`
-  for COUCHDB_INSTANCE in `getInstances $1`
+  for COUCHDB_INSTANCE in $(getInstances $1)
   do
-    loadInstance $COUCHDB_INSTANCE
+    loadInstance "$COUCHDB_INSTANCE"
 
-      echo --- instance: $COUCHDB_INSTANCE
-      curl --head -X GET $COUCH_URL 2>/dev/null | grep "^HTTP.*\ 200\ "
-
-      if [ $? -eq 0 ]; then
+      echo "--- instance: $COUCHDB_INSTANCE"
+      if curl --head -X GET "$COUCH_URL" 2>/dev/null | grep "^HTTP.*\ 200\ "; then
         echo OK, connected.
         sleep 2
         _doBackupOfSingleInstance
@@ -125,14 +122,14 @@ function doBackup(){
       else
         rc=$rc+1
         color error
-        echo ERROR: couch DB instance is not available or canot be accessed with these credentials in config file
+        echo "ERROR: couch DB instance is not available or cannot be accessed with these credentials in config file"
         # repeat curl to show the error message
-        curl -X GET $COUCH_URL
+        curl -X GET "$COUCH_URL"
         color reset
       fi
 
     echo
-    echo --- `date` done.
+    echo "--- $(date) done."
     echo
   done
 }
@@ -143,11 +140,11 @@ function doBackup(){
 function _doBackupOfSingleInstance(){
 
   create_targetdir
-  mkdir -p ${BACKUP_TARGETDIR}/${COUCHDB_INSTANCE} 2>/dev/null
-  mkdir -p ${ARCHIVE_DIR}/${COUCHDB_INSTANCE}/seq 2>/dev/null
+  mkdir -p "${BACKUP_TARGETDIR}/${COUCHDB_INSTANCE}" 2>/dev/null
+  mkdir -p "${ARCHIVE_DIR}/${COUCHDB_INSTANCE}/seq" 2>/dev/null
 
-  local ARCHIVE_DIR2=${ARCHIVE_DIR}/${COUCHDB_INSTANCE}/deleted_databases
-  test -d "${ARCHIVE_DIR2}" || mkdir -p ${ARCHIVE_DIR2} 2>/dev/null
+  local ARCHIVE_DIR2="${ARCHIVE_DIR}/${COUCHDB_INSTANCE}/deleted_databases"
+  test -d "${ARCHIVE_DIR2}" || mkdir -p "${ARCHIVE_DIR2}" 2>/dev/null
 
   echo
   echo "    MOVE deleted databases into ${ARCHIVE_DIR2}"
@@ -155,25 +152,24 @@ function _doBackupOfSingleInstance(){
 
   # get a list of current databases
   dblist=/tmp/couch_list_${COUCHDB_INSTANCE}.txt
-  _getDblist > $dblist
-  ls -l $dblist
+  _getDblist > "$dblist"
+  ls -l "$dblist"
 
   # detect deleted databases: 
-  for dumpfile in $( find ${ARCHIVE_DIR}/${COUCHDB_INSTANCE}/ -maxdepth 1 -type f -name "*.couchdbdump.gz" )
+  for dumpfile in $( find "${ARCHIVE_DIR}/${COUCHDB_INSTANCE}/" -maxdepth 1 -type f -name "*.couchdbdump.gz" )
   do
       dbname=$( basename $dumpfile | sed "s#\.couchdbdump\.gz##g" )
-        grep "^${dbname}" $dblist >/dev/null
-      if [ $? -ne 0 ]; then
+      if ! grep "^${dbname}" "$dblist" >/dev/null; then
               SEQFILE=${ARCHIVE_DIR}/${COUCHDB_INSTANCE}/seq/__seq__${dbname}
               echo "DELETED $dbname ... $( ls -l ${dumpfile} | cut -f 5- -d ' ' )"
-              mv ${dumpfile} ${ARCHIVE_DIR2}
-              rm -f ${SEQFILE}
+              mv "${dumpfile}" "${ARCHIVE_DIR2}"
+              rm -f "${SEQFILE}"
       fi
   done
   # done | tee /tmp/couch_archive_${COUCHDB_INSTANCE}.txt
   echo
 
-  typeset -i iDbTotal=$( cat $dblist | wc -l )
+  typeset -i iDbTotal=$( cat "$dblist" | wc -l )
   typeset -i iDb=0
 
   echo
@@ -182,48 +178,48 @@ function _doBackupOfSingleInstance(){
   echo "      ARCHIVE ${ARCHIVE_DIR}/${COUCHDB_INSTANCE}"
   echo
 
-  for dbname in $( cat $dblist )
+  for dbname in $( cat "$dblist" )
   do
     iDb=$iDb+1
-    echo -n "----- `date` ${COUCHDB_INSTANCE} -- $iDb of $iDbTotal - ${dbname} - "
-    OUTFILE=${BACKUP_TARGETDIR}/${COUCHDB_INSTANCE}/`get_outfile ${dbname}`.couchdbdump
+    echo -n "----- $(date) ${COUCHDB_INSTANCE} -- $iDb of $iDbTotal - ${dbname} - "
+    OUTFILE=${BACKUP_TARGETDIR}/${COUCHDB_INSTANCE}/$(get_outfile "${dbname}").couchdbdump
     ARCHIVFILE=${ARCHIVE_DIR}/${COUCHDB_INSTANCE}/${dbname}.couchdbdump.gz
     SEQFILE=${ARCHIVE_DIR}/${COUCHDB_INSTANCE}/seq/__seq__${dbname}
 
-    sSequenceCurrent=`_getDbSeq ${dbname}`
-    sSequenceLast=`cat ${SEQFILE} 2>/dev/null | cut -f 1 -d '-'`
+    sSequenceCurrent=$(_getDbSeq "${dbname}")
+    sSequenceLast=$(cat "${SEQFILE}" 2>/dev/null | cut -f 1 -d '-')
 #    sSequenceLast=`cat ${SEQFILE} 2>/dev/null | tr -d '\n'`
 
     # echo
     # echo "update_seq --+-- current [${sSequenceCurrent}]" 
     # echo "             +-- backup  [${sSequenceLast}]"
     if [ "${sSequenceCurrent}" = "${sSequenceLast}" ]; then
-      echo SKIP: still on sequence ${sSequenceLast}
+      echo SKIP: still on sequence "${sSequenceLast}"
     else
       echo
       echo "update_seq --+-- current [${sSequenceCurrent}]" 
       echo "             +-- backup  [${sSequenceLast}]"
       echo -n "Need to backup ... "
-      couchbackup --db ${dbname} >${OUTFILE}.progress 2>/dev/null && mv ${OUTFILE}.progress ${OUTFILE}
+      couchbackup --db "${dbname}" >"${OUTFILE}".progress 2>/dev/null && mv "${OUTFILE}".progress "${OUTFILE}"
       fetchrc
 
       # $myrc is last returncode - set in fetchrc
       if [ $myrc -eq 0 ]; then
         echo -n "gzip ... "
-        compress_file $OUTFILE
+        compress_file "$OUTFILE"
         fetchrc
         if [ $myrc -eq 0 ]; then
-          cp ${OUTFILE}* ${ARCHIVFILE} && echo ${sSequenceCurrent}>${SEQFILE}
-          ls -l ${ARCHIVFILE} ${SEQFILE}
+          cp "${OUTFILE}"* "${ARCHIVFILE}" && echo "${sSequenceCurrent}">"${SEQFILE}"
+          ls -l "${ARCHIVFILE}" "${SEQFILE}"
         fi
       else
         echo "ERROR occured while dumping - abort"
       fi
-      ls -l $OUTFILE*
+      ls -l "$OUTFILE"*
       echo
     fi
   done
-  rm -f $dblist
+  rm -f "$dblist"
 }
 
 # ---------- RESTORE
diff --git a/plugins/localdump/mysql.sh b/plugins/localdump/mysql.sh
index 87153e9bbb3985e95e6286e0a918af091aea6dfd..7215c7fcf239dc4becd0b59b52145b826eea4f02 100755
--- a/plugins/localdump/mysql.sh
+++ b/plugins/localdump/mysql.sh
@@ -10,10 +10,11 @@
 #
 # 2016-11-10  ah,ds  v0.8  needs to be testet
 # 2017-03-28  .....  v1.0  added restore
+# 2022-01-20         v1.1  fixes with shellcheck
 # ================================================================================
 
-if [ -z $BACKUP_TARGETDIR ]; then
-  echo ERROR: you cannot start `basename $0` directly
+if [ -z "$BACKUP_TARGETDIR" ]; then
+  echo "ERROR: you cannot start $(basename "$0") directly"
   rc=$rc+1
   exit 1
 fi
@@ -33,33 +34,33 @@ function doMysqlBackup(){
 
   create_targetdir
 
-  for DATABASE_DIR in `find $SOURCE_DIR/* -type d -prune`;
+  for DATABASE_DIR in $(find "$SOURCE_DIR"/* -type d -prune);
   do
     # DATABASE=`basename $DATABASE_DIR`
-    DATABASE=`basename $DATABASE_DIR | sed "s#\@002d#-#g" `
-    TABLECOUNT=`find $DATABASE_DIR/. -type f -name *frm | wc -l`
-    echo --- database $DATABASE - $TABLECOUNT tables
+    DATABASE=$(basename "$DATABASE_DIR" | sed "s#\@002d#-#g" )
+    TABLECOUNT=$(find "$DATABASE_DIR"/. -type f -name "*frm" | wc -l)
+    echo "--- database $DATABASE - $TABLECOUNT tables"
     if [ $TABLECOUNT -gt 0 ]; then
       echo -n "backup ... "
-      OUTFILE=${BACKUP_TARGETDIR}/`get_outfile ${DATABASE}`.sql
+      OUTFILE="${BACKUP_TARGETDIR}/$(get_outfile "${DATABASE}").sql"
       # task 1251 - "--master-data=2" was removed
       $MYSQLDUMP --opt \
                --default-character-set=utf8 \
                --flush-logs \
                --single-transaction \
                --no-autocommit \
-               --result-file=$OUTFILE \
-               $DATABASE
+               --result-file="$OUTFILE" \
+               "$DATABASE"
       fetchrc
 
       # $myrc is last returncode - set in fetchrc
       if [ $myrc -eq 0 ]; then
         echo -n "gzip ... "
-        compress_file $OUTFILE
+        compress_file "$OUTFILE"
       else
         echo "ERROR occured - no gzip"
       fi
-      ls -l $OUTFILE*
+      ls -l "$OUTFILE"*
     fi
     echo
   done
@@ -73,9 +74,9 @@ function restoreByFile(){
   sMyfile=$1
   sMyDb=$2
 
-  if [ -z $sMyDb ]; then
+  if [ -z "$sMyDb" ]; then
     h2 "analyze dump $sMyfile"
-    sMyDb=`guessDB $sMyfile`
+    sMyDb=$(guessDB "$sMyfile")
     echo "detected db schema from file: [${sMyDb}]"
   else
     echo "db schema from param 2: [${sMyDb}]"
@@ -83,7 +84,7 @@ function restoreByFile(){
 
   echo
 
-  echo import to $sMyDb...
+  echo import to "$sMyDb"...
 
   h2 ensure that database exists ...
   color cmd
@@ -91,10 +92,10 @@ function restoreByFile(){
   color reset
 
   h2 import ...
-  ls -l $sMyfile
-  echo import to database [${sMyDb}]
+  ls -l "$sMyfile"
+  echo "import to database [${sMyDb}]"
   color cmd
-  zcat $sMyfile | mysql "${sMyDb}"
+  zcat "$sMyfile" | mysql "${sMyDb}"
   fetchrc
   color reset
 
@@ -130,6 +131,6 @@ else
   fi
 fi
 
-echo $0 $* [mysql] final returncode rc=$rc
+echo "$0 $* [mysql] final returncode rc=$rc"
 
 # --------------------------------------------------------------------------------
diff --git a/plugins/localdump/pgsql.sh b/plugins/localdump/pgsql.sh
index bbc9549f1dd0c422c253440053309325055cb918..5a896508dc34ba4d221020a984b3e0a7abfdd1ba 100755
--- a/plugins/localdump/pgsql.sh
+++ b/plugins/localdump/pgsql.sh
@@ -1,5 +1,7 @@
 #!/bin/bash
-# ================================================================================
#
# LOCALDUMP :: POSTGRES
+# ================================================================================
+#
+# LOCALDUMP :: POSTGRES
 # create gzipped plain text backups from each scheme
 #
 # --------------------------------------------------------------------------------
@@ -8,12 +10,14 @@
 #
 # 2016-11-10  ah,ds  v1.0
 # 2017-03-29  .....  v1.1  added restore
+# 2022-01-20         v1.2  fixes with shellcheck
 # ================================================================================
 
-  if [ -z $BACKUP_TARGETDIR ]; then
-    echo ERROR: you cannot start `basename $0` directly
-    exit 1
-  fi
+if [ -z "$BACKUP_TARGETDIR" ]; then
+  echo "ERROR: you cannot start $(basename "$0") directly"
+  rc=$rc+1
+  exit 1
+fi
 
 # --------------------------------------------------------------------------------
 # CONFIG
@@ -32,29 +36,29 @@ function doPgsqlBackup(){
   create_targetdir
 
   # ----- change owner of directory because of su command
-  chown ${PGUSER}. ${BACKUP_TARGETDIR}
+  chown ${PGUSER}. "${BACKUP_TARGETDIR}"
 
   # ----- GO
 
   sSqlGetDblist="select datname from pg_database where not datistemplate and datallowconn order by datname;"
-  for DATABASE in `su ${PGUSER} -c "psql -At -c '$sSqlGetDblist' postgres" 2>/dev/null`
+  for DATABASE in $(su ${PGUSER} -c "psql -At -c '$sSqlGetDblist' postgres" 2>/dev/null)
   do
     echo "----- $DATABASE"
     echo -n "backup ... "
-    OUTFILE=${BACKUP_TARGETDIR}/`get_outfile ${DATABASE}`.sql
+    OUTFILE="${BACKUP_TARGETDIR}/$(get_outfile "${DATABASE}").sql"
     su ${PGUSER} -c "pg_dump -Fp ${DATABASE} >$OUTFILE"
     fetchrc
 
     if [ $myrc -eq 0 ]; then
       echo -n "compress ... "
-      compress_file $OUTFILE
+      compress_file "$OUTFILE"
     else
       color error
       echo "ERROR occured - no gzip"
       color reset
       # mv $OUTFILE $OUTFILE
     fi
-    ls -l $OUTFILE*
+    ls -l "$OUTFILE"*
   echo
   done
 }
@@ -67,9 +71,9 @@ function restoreByFile(){
   sMyfile=$1
   sMyDb=$2
 
-  if [ -z $sMyDb ]; then
+  if [ -z "$sMyDb" ]; then
     h2 "analyze dump $sMyfile"
-    sMyDb=`guessDB $sMyfile`
+    sMyDb="$(guessDB "$sMyfile" )"
     echo "detected db schema from file: [${sMyDb}]"
   else
     echo "db schema from param 2: [${sMyDb}]"
@@ -77,22 +81,22 @@ function restoreByFile(){
 
   echo
 
-  echo import to $sMyDb...
+  echo import to "$sMyDb"...
 
-  h2 deleting database [$sMyDb] ...
+  h2 "deleting database [$sMyDb] ..."
   color cmd
   su ${PGUSER} -c "dropdb ${sMyDb}"
   color reset
 
-  h2 ensure that database exists ...
+  h2 "ensure that database exists ..."
   color cmd
   su ${PGUSER} -c "psql -c \"CREATE DATABASE ${sMyDb};\""
   fetchrc
   color reset
 
-  h2 import ...
+  h2 "import ..."
   ls -l "${sMyfile}"
-  echo import to database [${sMyDb}]
+  echo "import to database [${sMyDb}]"
   color cmd
   zcat "${sMyfile}" | su ${PGUSER} -c "psql -d ${sMyDb}"
   fetchrc
@@ -123,6 +127,6 @@ function restoreByFile(){
 
   fi
 
-  echo $0 $* [postgres] final returncode rc=$rc
+  echo "$0 $* [postgres] final returncode rc=$rc"
 
 # --------------------------------------------------------------------------------
diff --git a/scheduler.sh b/scheduler.sh
index 1104e356bdd302fee275f8e97d22bbf2bffd3a74..808e0179fe8e79a577ac5dc0a88b45f1d9c20f7b 100755
--- a/scheduler.sh
+++ b/scheduler.sh
@@ -25,135 +25,8 @@
 # 2016-11-10  ah,ds  v1.0
 # ================================================================================
 
-. `dirname $0`/jobhelper.sh
-
-
-  typeset -i rcScheduler=0
-  typeset -i rcTransfer=0
-  bStart=1
-
-
-# --------------------------------------------------------------------------------
-# checks
-# --------------------------------------------------------------------------------
-
-  j_requireUser "root"
-
-  # --- read schedulerdata
-  j_read
-
-
-  cat << EOFschedulerinfo
-
-    execution times: $JOBDEF_INC at `_j_getvar ${JOBFILE} "start-time-inc"`
-    Full backup    : $JOBDEF_FULL at `_j_getvar ${JOBFILE} "start-time-full"`
-
-    do today = $JOB_DOTODAY
-
-    type = $JOBDEF_TYPE $JOBDEF_AUTO
-
-    starttime = $JOBDEF_STARTTIME
-    stopfile = $JOB_DONEFILE
-
-EOFschedulerinfo
-
-  if [ $JOB_DOTODAY -eq 0 ]; then
-    echo "ABORT: nothing to do today"
-    bStart=0
-  fi
-
-
-  # check: Backup bereits gelaufen?
-  ls -l ${JOB_DONEFILE}.* 2>/dev/null
-  if [ $? -eq 0 ]; then
-    echo
-    echo "ABORT: $JOB_DONEFILE was found"
-    bStart=0
-  else
-    echo "Job was not started yet."
-  fi
-
-
-  if [ ${JOBDEF_STARTTIME} -gt ${mytime} ]; then
-    echo "INFO: waiting for next run at ${JOBDEF_STARTTIME} ..."
-    bStart=0
-  fi
-
-  if [ "$1" = "-f" -o  "$1" = "--force" ]; then
-    echo FORCE parameter detected.
-    echo Overriding settings to make an incremental backup now.
-
-    JOB_DOTODAY=1
-    JOBDEF_TYPE=inc
-    JOBDEF_STARTTIME=`date +%H%M`
-    bStart=1
-
-  fi
-
-  if [ "$1" = "-d" -o  "$1" = "--dryrun" ]; then
-     echo DRYRUN parameter detected
-     bStart=0
-  fi
-
-# --------------------------------------------------------------------------------
-
-  if [ $bStart -eq 1 ]; then
-
-    # ----- local dumps
-
-    echo "INFO: `date` - starting backup ... type $JOBDEF_TYPE - time $JOBDEF_STARTTIME " | tee -a $JOB_LOGFILE
-    touch "${JOB_DONEFILE}.01.START"
-    cat $JOBFILE >>$JOB_LOGFILE
-
-
-    echo "INFO: `date` - Making local backups ... ${DIR_SELF}/localdump.sh ALL" | tee -a $JOB_LOGFILE
-    ${DIR_SELF}/localdump.sh ALL | tee -a $JOB_LOGFILE
-    rcScheduler=$?
-    echo "INFO: `date` - local backups were finished" | tee -a $JOB_LOGFILE
-
-    sleep 2
-
-    # ----- transfer
-
-    if [ -x "${DIR_SELF}/transfer.sh" ]; then
-
-      # transfer files
-      echo "INFO: `date` - Sending data to storage... ${DIR_SELF}/transfer.sh $JOBDEF_TYPE" | tee -a $JOB_LOGFILE
-      touch "${JOB_DONEFILE}.02.STARTTRANSFER"
-      ${DIR_SELF}/transfer.sh $JOBDEF_TYPE $JOBDEF_AUTO 2>&1 | tee -a $JOB_LOGFILE
-      rcTransfer=$?
-
-    else
-      echo "SKIP - transfer.sh not found; all files will be kept on local filesystem only" | tee -a $JOB_LOGFILE
-    fi
-
-    rcScheduler=$rcScheduler+rcTransfer
-
-
-    echo "INFO: `date` - DONE" | tee -a $JOB_LOGFILE
-    touch "${JOB_DONEFILE}.03.DONE"
-
-    echo
-    echo log for this executed job is
-    ls -l $JOB_LOGFILE
-    echo
-
-    echo "INFO: `date` - cleanup logs"
-    echo find "${DIR_LOGS}" -mtime +28 -delete -print
-    find "${DIR_LOGS}" -mtime +28 -delete -print
-    echo
-
-    echo STATUS $0 exit with final returncode rc=$rcScheduler | tee -a $JOB_LOGFILE
-    echo
-
-  else
-    echo "Nothing to do."
-  fi
-
-# --------------------------------------------------------------------------------
-
-
-  exit $rcScheduler
-
+echo "ERROR: $0 was DISABLED."
+echo "start $( dirname "$0" )/backup.sh by cron when you wish to start the backup."
+exit 1
 
 # --------------------------------------------------------------------------------