iml-backup / Commits

Commit 88b13d36
authored 1 year ago by Hahn Axel (hahn)

enable tmp file (faster); speedup loops in backup

parent 05af5273
No related branches found. No related tags found.
1 merge request: !109 enable tmp file (faster); speedup loops in backup

Showing 1 changed file: plugins/localdump/couchdb2.sh (48 additions, 17 deletions)
--- a/plugins/localdump/couchdb2.sh
+++ b/plugins/localdump/couchdb2.sh
@@ -28,6 +28,7 @@
 # 2023-06-06  ah  v1.9   show a warning if the sequence id was not fetched
 # 2023-06-12  ah  v1.10  skip couchdb dump if no sequence id was detected (=db deleted since fetching list of all dbs)
 # 2023-06-26  ah  v1.11  speed up detection of changed databases
+# 2023-06-27  ah  v1.12  enable tmp file for dblist again (faster); speedup loops in backup
 # ================================================================================
 
 if [ -z "$BACKUP_TARGETDIR" ]; then
@@ -106,11 +107,13 @@ function wait4curlcount(){
 function reqCombined(){
     typeset -i local iChunksize; iChunksize=$1
     typeset -i local iParallel; iParallel=$2
+    local dblistfile; dblistfile="$3"
 
     typeset -i iCounter=0
     cmdline=
-    for mydb in $dblist
+    # for mydb in $dblist
+    for mydb in $( cat $dblistfile )
     do
         iCounter+=1
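
Note: the body of reqCombined() is not part of this diff; per its new signature it now reads database names from the list file passed as $3 instead of the global $dblist variable. Purely as a hedged sketch of the chunk-and-parallelize idea the surrounding messages describe (simplified to one request per database instead of $iChunksize URLs per curl call, and using a hypothetical COUCHDB_URL base url), the metadata fetch could look roughly like this:

#!/usr/bin/env bash
# Sketch only - not the project's reqCombined(). Assumes GNU xargs, a
# hypothetical COUCHDB_URL (base url incl. credentials) and a db list file
# with one database name per line.
dblistfile="/tmp/dblist_example"
iParallel=6

# run up to $iParallel curl requests at a time, one database per request
xargs -a "$dblistfile" -P "$iParallel" -I {} \
    curl -s "${COUCHDB_URL}/{}" \
  | jq -r ' [ .db_name, .update_seq ] | @csv ' | tr -d '"'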
@@ -213,18 +216,34 @@ function _doBackupOfSingleInstance(){
         test -d "$_dir" || ( echo "creating $_dir"; mkdir -p "$_dir" )
     done
 
+    echo
+    echo "--- $( date ) Get list of all databases"
+    echo "    MOVE deleted databases into ${ARCHIVE_DIR2}"
+    echo
     # get a list of current databases
-    dblist=$( _getDblist )
+    # dblist=$( _getDblist )
+    local dblistfile
+    dblistfile="/tmp/dblist_${COUCHDB_INSTANCE}"
+    _getDblist > "${dblistfile}"
+
+    # typeset -i iDbTotal=$( echo "$dblist" | wc -l )
+    typeset -i iDbTotal=$( cat "$dblistfile" | wc -l )
+    typeset -i iDb=0       # counter for number of database in the loop
+    typeset -i iDbCount=0  # counter for backed up databases
+    echo "${COUCHDB_INSTANCE} has $iDbTotal databases"
 
     # detect deleted databases:
-    echo
-    echo "--- $( date ) MOVE deleted databases "
-    echo "... into ${ARCHIVE_DIR2}"
-    echo
     for dumpfile in $( find "${ARCHIVE_DIR}/${COUCHDB_INSTANCE}/" -maxdepth 1 -type f -name "*.couchdbdump.gz" )
     do
-        dbname=$( basename $dumpfile | sed "s#\.couchdbdump\.gz##g" )
-        if ! grep "^${dbname}" <<< "$dblist" >/dev/null; then
+        # extract database name: get basename and cut extension
+        # dbname=$( basename $dumpfile | sed "s#\.couchdbdump\.gz##g" )
+        dbname=${dumpfile##*/}
+        dbname=${dbname/%.couchdbdump.gz//}
+        dbname=${dbname/\/}
+
+        # if ! grep "^${dbname}" <<< "$dblist" >/dev/null; then
+        if ! grep "^${dbname}" "${dblistfile}" >/dev/null; then
            SEQFILE=${ARCHIVE_DIR}/${COUCHDB_INSTANCE}/seq/__seq__${dbname}
            SECURITYFILE=${ARCHIVE_DIR}/${COUCHDB_INSTANCE}/security/__security__${dbname}.json
            echo "DELETED $dbname ... $( ls -l ${dumpfile} | cut -f 5- -d ' ' )"
@@ -234,22 +253,21 @@ function _doBackupOfSingleInstance(){
         fi
     done
     # done | tee /tmp/couch_archive_${COUCHDB_INSTANCE}.txt
 
-    echo
-    typeset -i iDbTotal=$( echo "$dblist" | wc -l )
-    typeset -i iDb=0
-    typeset -i iDbCount=0
     echo
-    echo "    DUMP databases of instance ${COUCHDB_INSTANCE}: $iDbTotal databases"
+    echo "--- $( date ) DUMP databases"
+    echo "    of instance ${COUCHDB_INSTANCE}: $iDbTotal databases"
     echo "    TO BACKUP ${BACKUP_TARGETDIR}/${COUCHDB_INSTANCE}"
     echo "    ARCHIVE   ${ARCHIVE_DIR}/${COUCHDB_INSTANCE}"
     echo
 
+    # TODO: optimze creation of hash with database and sequence id
     local iChunksize=100
-    local iParallel=10
+    local iParallel=6
     echo "--- $( date ) - Get database meta infos ... max $iParallel parralel curl requests sending $iChunksize database urls per connection"
-    seq=$( reqCombined 100 6 | jq -r ' [ .db_name, .update_seq ] | @csv ' | tr -d '"' )
+    seq=$( reqCombined $iChunksize $iParallel "$dblistfile" | jq -r ' [ .db_name, .update_seq ] | @csv ' | tr -d '"' )
+    # seq=$( reqCombined $iChunksize $iParallel "$dblistfile" | jq -r ' [ .db_name, .update_seq ] | @csv ' | tr -d '"' | tr ',' ' ' | awk '{ sub(/-.*/, "\1", $2 ); print $1 " " $2 }' )
+    echo "$seq" | head
 
     declare -A aSeq
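
Note: the jq ... @csv | tr -d '"' post-processing in the new seq= line turns each CouchDB database info document (GET /{db} returns db_name and update_seq among other fields) into one name,sequence line. A quick illustration with a shortened, made-up response:

# Made-up, shortened CouchDB response piped through the same filter as above:
echo '{"db_name":"mydb","update_seq":"1234-g1AAAA","doc_count":42}' \
  | jq -r ' [ .db_name, .update_seq ] | @csv ' | tr -d '"'
# prints: mydb,1234-g1AAAA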
@@ -261,10 +279,20 @@ function _doBackupOfSingleInstance(){
         # seqid=$( echo $seqvalue | cut -f 1 -d '-')
         aSeq+=([$db]=$seqid)
     done
+    # for line in $( echo "$seq" )
+    # do
+    #     IFS=" " read -r db seqid <<< "$line"
+    #     echo "... $line ... $db - $seqid"
+    #     aSeq+=([$db]=$seqid)
+    # done
 
+    dbname="_users"
+    # echo "Seq ID von $dbname: ${aSeq[$dbname]}"
+    # echo
+    # exit
 
-    for dbname in $( echo "$dblist" )
+    for dbname in $( cat "$dblistfile" )
     do
         iDb+=1
         echo -n "----- $( date ) ${COUCHDB_INSTANCE} -- $iDb of $iDbTotal - ${dbname} - "
@@ -323,6 +351,9 @@ function _doBackupOfSingleInstance(){
             fi # if [ -z "$sSequenceCurrent" ]; then
         fi # if [ "${sSequenceCurrent}" = "${sSequenceLast}" ] ...
     done
+
+    rm -f "$dblistfile"
+
     echo "__DB__ $SERVICENAME backup INFO: ${COUCHDB_INSTANCE} - backed up $iDbCount dbs of $iDbTotal total"
 }