DCD-686: Move restore operations into the fetch role for the time being.

This commit is contained in:
Steve Smith
2019-10-10 11:05:20 +11:00
parent dddf3a86ec
commit 6f56925fa1
3 changed files with 59 additions and 20 deletions

View File

@@ -34,24 +34,7 @@
lc_collate: "{{ atl_jdbc_collation }}" lc_collate: "{{ atl_jdbc_collation }}"
lc_ctype: "{{ atl_jdbc_ctype }}" lc_ctype: "{{ atl_jdbc_ctype }}"
template: "{{ atl_jdbc_template }}" template: "{{ atl_jdbc_template }}"
register: db_create register: db_created
- name: Restore application database
postgresql_db:
login_host: "{{ atl_db_host }}"
login_user: "{{ atl_db_root_user }}"
login_password: "{{ atl_db_root_password }}"
port: "{{ atl_db_port }}"
name: "{{ atl_jdbc_db_name }}"
owner: "{{ atl_jdbc_user }}"
encoding: "{{ atl_jdbc_encoding }}"
lc_collate: "{{ atl_jdbc_collation }}"
lc_ctype: "{{ atl_jdbc_ctype }}"
template: "{{ atl_jdbc_template }}"
# Depends on fetch_backup roles
state: restore
target: "{{ atl_backup_db_dest }}"
when: db_create.changed and atl_backup_db_dest is defined
tags: tags:
- new_only - new_only

View File

@@ -0,0 +1,4 @@
---
atl_backup_home_restore_canary_filename: ".slingshot_home_restore"
atl_backup_home_restore_canary_path: "{{ atl_product_home_shared }}/{{ atl_backup_home_restore_canary_filename }}"

View File

@@ -3,6 +3,15 @@
# This role will attempt to fetch and load the backup manifest from a # This role will attempt to fetch and load the backup manifest from a
# remote HTTP or S3 URL. On successful completion the contents of JSON # remote HTTP or S3 URL. On successful completion the contents of JSON
# or YAML document will be in the var `atl_backup_manifest`. # or YAML document will be in the var `atl_backup_manifest`.
#
# PREREQUISITES:
# * `atl_backup_manifest_url` points at the manifest.
# * The shared home filesystem is mounted if necessary (e.g. NFS/EFS).
# * The database has been created and the variable `db_created` is # * The database has been created and the variable `db_created` is
# registered with the result (i.e. `register: db_created`). # registered with the result (i.e. `register: db_created`).
#
# NOTE: The actual DB/FS restore operations could potentially be split
# out into discrete roles, but currently that is not required.
- block: - block:
@@ -40,8 +49,6 @@
dest: "{{ atl_backup_manifest_dest }}" dest: "{{ atl_backup_manifest_dest }}"
when: atl_backup_manifest_url.scheme != 's3' when: atl_backup_manifest_url.scheme != 's3'
# FIXME: The manifest format is still undecided; everything
# referencing this variable should be considered a placeholder.
- name: Load parameters from manifest - name: Load parameters from manifest
include_vars: include_vars:
file: "{{ atl_backup_manifest_dest }}" file: "{{ atl_backup_manifest_dest }}"
@@ -49,6 +56,9 @@
- name: Define the DB and home dump destinations - name: Define the DB and home dump destinations
set_fact: set_fact:
# FIXME: The manifest format is still undecided so the
# following usages will need to be updated once it settles.
atl_backup_id: "{{ atl_backup_manifest.name }}"
atl_backup_db_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.db_dump | basename }}" atl_backup_db_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.db_dump | basename }}"
atl_backup_home_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.shared_home_dump | basename }}" atl_backup_home_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.shared_home_dump | basename }}"
@@ -74,4 +84,46 @@
- name: Install distro-specific restore support packages - name: Install distro-specific restore support packages
include_tasks: "{{ ansible_distribution|lower }}.yml" include_tasks: "{{ ansible_distribution|lower }}.yml"
- name: Restore application database
postgresql_db:
login_host: "{{ atl_db_host }}"
login_user: "{{ atl_db_root_user }}"
login_password: "{{ atl_db_root_password }}"
port: "{{ atl_db_port }}"
name: "{{ atl_jdbc_db_name }}"
owner: "{{ atl_jdbc_user }}"
encoding: "{{ atl_jdbc_encoding }}"
lc_collate: "{{ atl_jdbc_collation }}"
lc_ctype: "{{ atl_jdbc_ctype }}"
template: "{{ atl_jdbc_template }}"
# Depends on fetch_backup roles
state: restore
target: "{{ atl_backup_db_dest }}"
when: db_created.changed and atl_backup_db_dest is defined
- name: Check for the restore canary file
stat:
path: "{{ atl_backup_home_restore_canary_path }}"
register: restore_canary
- name: Create shared home if necessary
file:
path: "{{ atl_product_home_shared }}"
state: directory
mode: 0750
owner: "{{ atl_product_user }}"
group: "{{ atl_product_user }}"
when: restore_canary.stat.exists
- name: Restore the shared-home backup
unarchive:
path: "{{ atl_backup_home_restore_canary_path }}"
dest: "{{ atl_product_home_shared }}"
owner: "{{ atl_product_user }}"
group: "{{ atl_product_user }}"
when: restore_canary.stat.exists
when: atl_backup_manifest_url is defined and atl_backup_manifest_url != '' when: atl_backup_manifest_url is defined and atl_backup_manifest_url != ''