Yedidyah Bar David has uploaded a new change for review. Change subject: packaging: engine-backup: allow custom dump/compression ......................................................................
packaging: engine-backup: allow custom dump/compression Support plain or custom dump Support compressing each of: - final archive - files tar - engine/dwh/reports db dump with any of: - gzip - bzip2 - xz - None Support passing --jobs to pg_restore on custom format Keep files inside archive as tar file and not individually TODO: implement --small-size, --fast-restore after testing Bug-Url: https://bugzilla.redhat.com/1188132 Change-Id: Ibf974956d4ee0abfcc497968471912b212e7e8c2 Signed-off-by: Yedidyah Bar David <d...@redhat.com> --- M packaging/bin/engine-backup.sh 1 file changed, 279 insertions(+), 52 deletions(-) git pull ssh://gerrit.ovirt.org:29418/ovirt-engine refs/changes/96/39896/1 diff --git a/packaging/bin/engine-backup.sh b/packaging/bin/engine-backup.sh index 8fee6cf..570e6d8 100755 --- a/packaging/bin/engine-backup.sh +++ b/packaging/bin/engine-backup.sh @@ -156,7 +156,22 @@ reportsdb reports database only --file=FILE file to use during backup or restore --log=FILE log file to use + --archive-compressor=COMPRESSOR + Use COMPRESSOR to compress the backup file, can be one of: + gzip + bzip2 + xz + None + --files-compressor=COMPRESSOR for the files, same options as --archive-compressor --keep-temporary-data Do not cleanup temporary data on restore + --db-compressor=COMPRESSOR for the Engine, same options as --archive-compressor + --db-dump-format=FORMAT + Engine DB dump format, see pg_dump(1) for details. Can be one of: + plain + custom + --db-restore-jobs=JOBS For the engine db restore when using custom dump + format, number of jobs. Passed as the '--jobs' + option of pg_restore. Defaults to 2. --change-db-credentials activate the following options, to restore the Engine database to a different location etc. If used, existing credentials are ignored. 
@@ -169,6 +184,9 @@ --db-name=name set database name --db-secured set a secured connection --db-secured-validation validate host + --dwh-db-compressor=COMPRESSOR for DWH, same options as --archive-compressor + --dwh-db-dump-format=FORMAT for DWH, same options as --db-dump-format + --dwh-db-restore-jobs=JOBS for DWH, same as --db-restore-jobs --change-dwh-db-credentials activate the following options, to restore the DWH database to a different location etc. If used, existing credentials are ignored. @@ -181,6 +199,9 @@ --dwh-db-name=name set dwh database name --dwh-db-secured set a secured connection for dwh --dwh-db-secured-validation validate host for dwh + --reports-db-compressor=COMPRESSOR for Reports, same options as --archive-compressor + --reports-db-dump-format=FORMAT for Reports, same options as --db-dump-format + --reports-db-restore-jobs=JOBS for Reports, same as --db-restore-jobs --change-reports-db-credentials activate the following options, to restore the Reports database to a different location etc. If used, existing credentials are ignored. @@ -193,6 +214,42 @@ --reports-db-name=name set reports database name --reports-db-secured set a secured connection for reports --reports-db-secured-validation validate host for reports + + --fast-restore the default for backup, equivalent to: + --archive-compressor=gzip \ + --files-compressor=xz \ + --db-dump-format=custom \ + --db-compressor=None \ + --dwh-db-dump-format=custom \ + --dwh-db-compressor=None \ + --reports-db-dump-format=custom \ + --reports-db-compressor=None + + In addition, you should pass, when restoring: + --db-restore-jobs=N \ + --dwh-db-restore-jobs=N \ + --reports-db-restore-jobs=N + where 'N' is around number of available cpu cores times 1.5. + + --small-size for a small backup file, equivalent to: + TODO + + Below wasn't smallest for an empty engine db (right after setup). 
# --- option-value parsers ---------------------------------------------------
# Each helper validates one user-supplied option value and echoes the
# canonical internal form on stdout. On invalid input it die()s; since
# the callers invoke these inside $(...), die() only terminates the
# subshell, and the caller must (and does) check $? afterwards.

# Map a compressor name to the matching tar compression option letter.
# $1 - one of: gzip, bzip2, xz, None ('None' -> empty: no compression)
compressor_to_tar_option() {
	local res
	case "$1" in
		gzip) res=z ;;
		bzip2) res=j ;;
		xz) res=J ;;
		None) res= ;;
		*) die "invalid compressor '$1'"
	esac
	echo "${res}"
}

# Map a compressor name to the command used to (de)compress a stream.
# $1 - one of: gzip, bzip2, xz, None ('None' -> empty: no compression)
compressor_to_command() {
	local res
	case "$1" in
		gzip|bzip2|xz) res="$1" ;;
		None) res= ;;
		*) die "invalid compressor '$1'"
	esac
	echo "${res}"
}

# Validate a pg_dump dump format. $1 - 'plain' or 'custom', see pg_dump(1).
parse_dump_format() {
	local res
	case "$1" in
		plain|custom) res="$1" ;;
		*) die "invalid dump format '$1'"
	esac
	echo "${res}"
}

# Validate a number of pg_restore jobs: a positive decimal integer.
# '0' is rejected because pg_restore does not accept --jobs=0.
parse_jobs() {
	local res
	case "$1" in
		''|0|*[!0-9]*) die "invalid number of jobs" ;;
		*) res="$1"
	esac
	echo "${res}"
}
ARCHIVE_COMPRESS_OPTION=$(compressor_to_tar_option "${v}") + [ $? != 0 ] && logdie "failed parsing compressor" + ;; + --files-compressor=*) + FILES_COMPRESS_OPTION=$(compressor_to_tar_option "${v}") + [ $? != 0 ] && logdie "failed parsing compressor" + ;; --keep-temporary-data) KEEP_TEMPORARY_DATA=1 + ;; + --db-compressor=*) + DB_DUMP_COMPRESSOR=$(compressor_to_command "${v}") + [ $? != 0 ] && logdie "failed parsing compressor" + ;; + --db-dump-format=*) + DB_DUMP_FORMAT=$(parse_dump_format "${v}") + [ $? != 0 ] && logdie "failed parsing dump format" + ;; + --db-restore-jobs=*) + DB_RESTORE_JOBS=$(parse_jobs "${v}") + [ $? != 0 ] && logdie "failed parsing jobs" ;; --change-db-credentials) CHANGE_DB_CREDENTIALS=1 @@ -337,6 +465,18 @@ --db-sec-validation) MY_DB_SECURED_VALIDATION="True" ;; + --dwh-db-compressor=*) + DWH_DB_DUMP_COMPRESSOR=$(compressor_to_command "${v}") + [ $? != 0 ] && logdie "failed parsing compressor" + ;; + --dwh-db-dump-format=*) + DWH_DB_DUMP_FORMAT=$(parse_dump_format "${v}") + [ $? != 0 ] && logdie "failed parsing dump format" + ;; + --dwh-db-restore-jobs=*) + DWH_DB_RESTORE_JOBS=$(parse_jobs "${v}") + [ $? != 0 ] && logdie "failed parsing jobs" + ;; --change-dwh-db-credentials) CHANGE_DWH_DB_CREDENTIALS=1 ;; @@ -370,6 +510,18 @@ ;; --dwh-db-sec-validation) MY_DWH_DB_SECURED_VALIDATION="True" + ;; + --reports-db-compressor=*) + REPORTS_DB_DUMP_COMPRESSOR=$(compressor_to_command "${v}") + [ $? != 0 ] && logdie "failed parsing compressor" + ;; + --reports-db-dump-format=*) + REPORTS_DB_DUMP_FORMAT=$(parse_dump_format "${v}") + [ $? != 0 ] && logdie "failed parsing dump format" + ;; + --reports-db-restore-jobs=*) + REPORTS_DB_RESTORE_JOBS=$(parse_jobs "${v}") + [ $? 
!= 0 ] && logdie "failed parsing jobs" ;; --change-reports-db-credentials) CHANGE_REPORTS_DB_CREDENTIALS=1 @@ -478,7 +630,6 @@ local tardir="${TEMP_FOLDER}/tar" log "Creating temp folder ${tardir}" mkdir "${tardir}" || logdie "Cannot create '${tardir}'" - mkdir "${tardir}/files" || logdie "Cannot create '${tardir}/files" mkdir "${tardir}/db" || logdie "Cannot create '${tardir}/db'" if [ -n "${SCOPE_FILES}" ] ; then @@ -490,19 +641,20 @@ if [ -n "${SCOPE_ENGINE_DB}" -a -n "${ENGINE_DB_USER}" ]; then output "- Engine database '"${ENGINE_DB_DATABASE}"'" log "Backing up database to ${tardir}/db/${DB_BACKUP_FILE_NAME}" - backupDB "${tardir}/db/${DB_BACKUP_FILE_NAME}" "${ENGINE_DB_USER}" "${ENGINE_DB_HOST}" "${ENGINE_DB_PORT}" "${ENGINE_DB_DATABASE}" + backupDB "${tardir}/db/${DB_BACKUP_FILE_NAME}" "${ENGINE_DB_USER}" "${ENGINE_DB_HOST}" "${ENGINE_DB_PORT}" "${ENGINE_DB_DATABASE}" "${DB_DUMP_COMPRESSOR}" "${DB_DUMP_FORMAT}" fi if [ -n "${SCOPE_DWH_DB}" -a -n "${DWH_DB_USER}" ]; then output "- DWH database '"${DWH_DB_DATABASE}"'" log "Backing up dwh database to ${tardir}/db/${DWHDB_BACKUP_FILE_NAME}" - backupDB "${tardir}/db/${DWHDB_BACKUP_FILE_NAME}" "${DWH_DB_USER}" "${DWH_DB_HOST}" "${DWH_DB_PORT}" "${DWH_DB_DATABASE}" + backupDB "${tardir}/db/${DWHDB_BACKUP_FILE_NAME}" "${DWH_DB_USER}" "${DWH_DB_HOST}" "${DWH_DB_PORT}" "${DWH_DB_DATABASE}" "${DWH_DB_DUMP_COMPRESSOR}" "${DWH_DB_DUMP_FORMAT}" fi if [ -n "${SCOPE_REPORTS_DB}" -a -n "${REPORTS_DB_USER}" ]; then output "- Reports database '"${REPORTS_DB_DATABASE}"'" log "Backing up reports database to ${tardir}/db/${REPORTSDB_BACKUP_FILE_NAME}" - backupDB "${tardir}/db/${REPORTSDB_BACKUP_FILE_NAME}" "${REPORTS_DB_USER}" "${REPORTS_DB_HOST}" "${REPORTS_DB_PORT}" "${REPORTS_DB_DATABASE}" + backupDB "${tardir}/db/${REPORTSDB_BACKUP_FILE_NAME}" "${REPORTS_DB_USER}" "${REPORTS_DB_HOST}" "${REPORTS_DB_PORT}" "${REPORTS_DB_DATABASE}" "${REPORTS_DB_DUMP_COMPRESSOR}" "${REPORTS_DB_DUMP_FORMAT}" fi echo "${PACKAGE_VERSION}" > 
"${tardir}/version" || logdie "Can't create ${tardir}/version" + output_config > "${tardir}/config" || logdie "Can't create ${tardir}/config" log "Creating md5sum at ${tardir}/md5sum" createmd5 "${tardir}" "${tardir}/md5sum" output "Packing into file '${FILE}'" @@ -533,12 +685,12 @@ backupFiles() { local paths="$1" local target="$2" - echo "${paths}" | while read -r path; do - [ -e "${path}" ] || continue - local dirname="$(dirname ${path})" - mkdir -p "${tardir}/files/${dirname}" || logdie "Cannot create '${tardir}/files/${dirname}" - cp -a "${path}" "${target}/${dirname}" || logdie "Cannot copy ${path} to ${target}/${dirname}" - done || logdie "Cannot read ${paths}" + echo "${paths}" | \ + while read -r path; do + [ -e "${path}" ] && echo "${path}" + done | \ + sed 's;^/;;' | \ + tar -C / --files-from - -cpSs"${FILES_COMPRESS_OPTION}"f "${target}" || logdie "Failed backing up ${paths}" } backupDB() { @@ -547,20 +699,39 @@ local host="$3" local port="$4" local database="$5" + local compressor="$6" + local format="$7" + local pgdump_log="${TEMP_FOLDER}/pgdump.log" - PGPASSFILE="${MYPGPASS}" pg_dump \ - -E "UTF8" \ - --disable-dollar-quoting \ - --disable-triggers \ - --format=p \ - -w \ - -U "${user}" \ - -h "${host}" \ - -p "${port}" \ - "${database}" \ - 2> "${pgdump_log}" \ - | bzip2 > "${file}.bz2" \ - || logdie "bzip2 failed compressing the backup of database ${database}" + if [ -n "${compressor}" ]; then + PGPASSFILE="${MYPGPASS}" pg_dump \ + -E "UTF8" \ + --disable-dollar-quoting \ + --disable-triggers \ + --format="${format}" \ + -w \ + -U "${user}" \ + -h "${host}" \ + -p "${port}" \ + "${database}" \ + 2> "${pgdump_log}" \ + | "${compressor}" > "${file}" \ + || logdie "${compressor} failed compressing the backup of database ${database}" + else + PGPASSFILE="${MYPGPASS}" pg_dump \ + -E "UTF8" \ + --disable-dollar-quoting \ + --disable-triggers \ + --format="${format}" \ + -w \ + -U "${user}" \ + -h "${host}" \ + -p "${port}" \ + "${database}" \ + 2> 
"${pgdump_log}" \ + > "${file}" \ + || logdie "Database ${database} backup failed" + fi if [ -s "${pgdump_log}" ]; then cat "${pgdump_log}" >> "${LOG}" @@ -601,11 +772,12 @@ log "Verifying version" verifyVersion + . "${TEMP_FOLDER}/config" output "Restoring:" if [ -n "${SCOPE_FILES}" ] ; then output "- Files" log "Restoring files" - restoreFiles "${BACKUP_PATHS}" + restoreFiles "${BACKUP_PATHS}" "${TEMP_FOLDER}/files" fi log "Reloading configuration" @@ -624,7 +796,7 @@ if [ -n "${SCOPE_ENGINE_DB}" -a -n "${ENGINE_DB_USER}" ]; then output "- Engine database '"${ENGINE_DB_DATABASE}"'" log "Restoring engine database backup at ${TEMP_FOLDER}/db/${DB_BACKUP_FILE_NAME}" - restoreDB "${TEMP_FOLDER}/db/${DB_BACKUP_FILE_NAME}" "${ENGINE_DB_USER}" "${ENGINE_DB_HOST}" "${ENGINE_DB_PORT}" "${ENGINE_DB_DATABASE}" "${ORIG_DB_USER}" + restoreDB "${TEMP_FOLDER}/db/${DB_BACKUP_FILE_NAME}" "${ENGINE_DB_USER}" "${ENGINE_DB_HOST}" "${ENGINE_DB_PORT}" "${ENGINE_DB_DATABASE}" "${ORIG_DB_USER}" "${DB_DUMP_COMPRESSOR}" "${DB_DUMP_FORMAT}" if [ -z "${KEEP_TEMPORARY_DATA}" ]; then output "Cleaning up temporary tables in engine database '${ENGINE_DB_DATABASE}':" cleanDbTempData "${ENGINE_DB_USER}" "${ENGINE_DB_HOST}" "${ENGINE_DB_PORT}" "${ENGINE_DB_DATABASE}" "${ENGINE_TABLES_TO_CLEAN_ON_RESTORE}" @@ -633,12 +805,12 @@ if [ -n "${SCOPE_DWH_DB}" -a -n "${DWH_DB_USER}" ]; then output "- DWH database '"${DWH_DB_DATABASE}"'" log "Restoring dwh database backup at ${TEMP_FOLDER}/db/${DWHDB_BACKUP_FILE_NAME}" - restoreDB "${TEMP_FOLDER}/db/${DWHDB_BACKUP_FILE_NAME}" "${DWH_DB_USER}" "${DWH_DB_HOST}" "${DWH_DB_PORT}" "${DWH_DB_DATABASE}" "${ORIG_DWH_DB_USER}" + restoreDB "${TEMP_FOLDER}/db/${DWHDB_BACKUP_FILE_NAME}" "${DWH_DB_USER}" "${DWH_DB_HOST}" "${DWH_DB_PORT}" "${DWH_DB_DATABASE}" "${ORIG_DWH_DB_USER}" "${DWH_DB_DUMP_COMPRESSOR}" "${DWH_DB_DUMP_FORMAT}" fi if [ -n "${SCOPE_REPORTS_DB}" -a -n "${REPORTS_DB_USER}" ]; then output "- Reports database '"${REPORTS_DB_DATABASE}"'" log "Restoring 
REPORTS database backup at ${TEMP_FOLDER}/db/${REPORTSDB_BACKUP_FILE_NAME}" - restoreDB "${TEMP_FOLDER}/db/${REPORTSDB_BACKUP_FILE_NAME}" "${REPORTS_DB_USER}" "${REPORTS_DB_HOST}" "${REPORTS_DB_PORT}" "${REPORTS_DB_DATABASE}" "${ORIG_REPORTS_DB_USER}" + restoreDB "${TEMP_FOLDER}/db/${REPORTSDB_BACKUP_FILE_NAME}" "${REPORTS_DB_USER}" "${REPORTS_DB_HOST}" "${REPORTS_DB_PORT}" "${REPORTS_DB_DATABASE}" "${ORIG_REPORTS_DB_USER}" "${REPORTS_DB_DUMP_COMPRESSOR}" "${REPORTS_DB_DUMP_FORMAT}" fi [ -n "${CHANGE_DB_CREDENTIALS}" ] && changeEngineDBConf [ -n "${CHANGE_DWH_DB_CREDENTIALS}" ] && changeDwhDBConf @@ -716,20 +888,62 @@ local port="$4" local database="$5" local orig_user="$6" - log "restoreDB: backupfile ${backupfile} user ${user} host ${host} port ${port} database ${database} orig_user ${orig_user}" - local psqllog="${TEMP_FOLDER}/psql-restore-log" - bz_cat "${backupfile}" | \ - PGPASSFILE="${MYPGPASS}" psql \ - -w \ - -U "${user}" \ - -h "${host}" \ - -p "${port}" \ - -d "${database}" \ - > "${psqllog}" 2>&1 \ - || logdie "Database ${database} restore failed" + local compressor="$7" + local format="$8" - cat "${psqllog}" >> "${LOG}" 2>&1 \ - || logdie "Failed to append psql log to restore log" + log "restoreDB: backupfile ${backupfile} user ${user} host ${host} port ${port} database ${database} orig_user ${orig_user}" + local pgrestorelog="${TEMP_FOLDER}/pg-restore-log" + + if [ "${format}" = "plain" ]; then + if [ -z "${compressor}" ]; then + PGPASSFILE="${MYPGPASS}" psql \ + -f "${backupfile}" \ + -w \ + -U "${user}" \ + -h "${host}" \ + -p "${port}" \ + -d "${database}" \ + > "${pgrestorelog}" 2>&1 \ + || logdie "Database ${database} restore failed" + else + # Requires the compressor to support '-d'. All our current ones do. 
# Extract the files archive $2 over / and restore the selinux context
# of every path listed (one per line) in $1 that exists afterwards.
# $1 - newline-separated list of absolute backed-up paths
# $2 - tar archive created by backupFiles
restoreFiles() {
	local paths="$1"
	local archive="$2"
	# No compression option needed: GNU tar auto-detects the
	# compression used at backup time when reading an archive.
	tar -C / -pSsxf "${archive}" || logdie "Failed restoring ${paths}"
	if selinuxenabled; then
		echo "${paths}" | while read -r path; do
			if [ -e "${path}" ]; then
				restorecon -R "${path}" || logdie "Failed setting selinux context for ${path}"
			fi
		done || logdie "Failed setting selinux contexts"
	fi
}

# Emit the backup-time settings that restore must know (dump formats
# and per-dump compressors) as shell assignments, one per line. The
# output is stored as 'config' inside the backup and sourced with '.'
# at restore time.
output_config() {
	# TODO find some better name?
	local var
	local VARS_TO_SAVE="DB_DUMP_COMPRESSOR
DB_DUMP_FORMAT
DWH_DB_DUMP_COMPRESSOR
DWH_DB_DUMP_FORMAT
REPORTS_DB_DUMP_COMPRESSOR
REPORTS_DB_DUMP_FORMAT"
	echo "${VARS_TO_SAVE}" | while read -r var; do
		# Indirect expansion plus %q instead of 'eval echo': values
		# are shell-quoted, so the config stays safe to source even
		# if a value ever contains whitespace or metacharacters.
		printf '%s=%q\n' "${var}" "${!var}"
	done
}