From 199cca264ddb9ed1e6a307458dc2bb8ec6a02545 Mon Sep 17 00:00:00 2001
From: Steve Smith
Date: Mon, 2 Dec 2019 14:49:33 +1100
Subject: [PATCH] DCD-828: Initial attempt at tar-based server-to-DC transform

---
 .../restore_conf_server/Dockerfile.j2         | 14 ++++
 .../molecule/restore_conf_server/molecule.yml | 30 ++++++++
 .../molecule/restore_conf_server/playbook.yml | 74 +++++++++++++++++++
 .../restore_conf_server/tests/test_default.py | 15 ++++
 roles/restore_backups/tasks/home_restore.yml  | 18 +++++
 roles/restore_backups/tasks/main.yml          |  1 +
 6 files changed, 152 insertions(+)
 create mode 100644 roles/restore_backups/molecule/restore_conf_server/Dockerfile.j2
 create mode 100644 roles/restore_backups/molecule/restore_conf_server/molecule.yml
 create mode 100644 roles/restore_backups/molecule/restore_conf_server/playbook.yml
 create mode 100644 roles/restore_backups/molecule/restore_conf_server/tests/test_default.py

diff --git a/roles/restore_backups/molecule/restore_conf_server/Dockerfile.j2 b/roles/restore_backups/molecule/restore_conf_server/Dockerfile.j2
new file mode 100644
index 0000000..e6aa95d
--- /dev/null
+++ b/roles/restore_backups/molecule/restore_conf_server/Dockerfile.j2
@@ -0,0 +1,14 @@
+# Molecule managed
+
+{% if item.registry is defined %}
+FROM {{ item.registry.url }}/{{ item.image }}
+{% else %}
+FROM {{ item.image }}
+{% endif %}
+
+RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
+    elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
+    elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
+    elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
+    elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \
+    elif [ $(command -v xbps-install) ]; then xbps-install -Syu && xbps-install -y python sudo bash ca-certificates && xbps-remove -O; fi
diff --git a/roles/restore_backups/molecule/restore_conf_server/molecule.yml b/roles/restore_backups/molecule/restore_conf_server/molecule.yml
new file mode 100644
index 0000000..04c0973
--- /dev/null
+++ b/roles/restore_backups/molecule/restore_conf_server/molecule.yml
@@ -0,0 +1,30 @@
+---
+dependency:
+  name: galaxy
+driver:
+  name: docker
+lint:
+  name: yamllint
+platforms:
+  - name: amazon_linux2
+    image: amazonlinux:2
+    groups:
+      - aws_node_local
+    ulimits:
+      - nofile:262144:262144
+provisioner:
+  name: ansible
+  options:
+    skip-tags: runtime_pkg
+  lint:
+    name: ansible-lint
+    options:
+      x: ["701"]
+  inventory:
+    links:
+      group_vars: ../../../../group_vars/
+verifier:
+  name: testinfra
+  lint:
+    name: flake8
+    enabled: false
diff --git a/roles/restore_backups/molecule/restore_conf_server/playbook.yml b/roles/restore_backups/molecule/restore_conf_server/playbook.yml
new file mode 100644
index 0000000..d526a64
--- /dev/null
+++ b/roles/restore_backups/molecule/restore_conf_server/playbook.yml
@@ -0,0 +1,74 @@
+---
+- name: Converge
+  hosts: all
+  vars:
+    atl_backup_home_dest: "{{ test_archive }}"
+    atl_backup_id: 'test-backup'
+    atl_backup_manifest_url: 'fake_manifest'
+    atl_backup_home_is_server: 'true'
+
+    atl_product_home_shared: '/media/atl/confluence/shared-home'
+    atl_backup_home_restore_canary_path: "{{ atl_product_home_shared }}/canary.tmp"
+    atl_product_edition: 'confluence'
+    atl_product_user: 'confluence'
+    atl_product_user_uid: '2001'
+    atl_product_version_cache: "{{ atl_product_home_shared }}/{{ atl_product_edition }}.version"
+
+    test_archive: '/tmp/hello.tar.gz'
+    test_archive_file: 'hello.txt'
+    test_archive_source: '/tmp/hello'
+
+    test_pre_step_prefix: '[PRE-TEST]'
+    test_product_version_file: "/tmp/{{ atl_product_edition }}.version"
+
+  pre_tasks:
+    - name: "{{ test_pre_step_prefix }} Install tar and useradd/groupadd binaries"
+      package:
+        state: present
+        name:
+          - tar
+          - shadow-utils
+
+    - name: "{{ test_pre_step_prefix }} Create application group"
+      group:
+        name: "{{ atl_product_user }}"
+        gid: "{{ atl_product_user_uid }}"
+
+    - name: "{{ test_pre_step_prefix }} Create application user"
+      user:
+        name: "{{ atl_product_user }}"
+        uid: "{{ atl_product_user_uid }}"
+        group: "{{ atl_product_user }}"
+
+    - name: "{{ test_pre_step_prefix }} Create a Conf server home directory structure"
+      file:
+        path: "{{ item }}"
+        state: directory
+        mode: 0755
+      with_items:
+        - "{{ test_archive_source }}"
+        - "{{ test_archive_source }}/attachments"
+        - "{{ test_archive_source }}/shared-home"
+
+    - name: "{{ test_pre_step_prefix }} Create files"
+      copy:
+        dest: "{{ item }}"
+        content: "content"
+      with_items:
+        - "{{ test_archive_source }}/unwanted.txt"
+        - "{{ test_archive_source }}/attachments/image.jpg"
+        - "{{ test_archive_source }}/shared-home/shared-content.txt"
+
+    - name: "{{ test_pre_step_prefix }} Archive the shared home"
+      archive:
+        path:
+          - "{{ test_archive_source }}/*"
+        dest: "{{ test_archive }}"
+        owner: "{{ atl_product_user }}"
+
+  tasks:
+    - name: Install distro-specific restore support packages
+      include_tasks: "../../tasks/{{ ansible_distribution|lower }}.yml"
+
+    - name: Restore shared home
+      include_tasks: "../../tasks/home_restore.yml"
diff --git a/roles/restore_backups/molecule/restore_conf_server/tests/test_default.py b/roles/restore_backups/molecule/restore_conf_server/tests/test_default.py
new file mode 100644
index 0000000..ced7af8
--- /dev/null
+++ b/roles/restore_backups/molecule/restore_conf_server/tests/test_default.py
@@ -0,0 +1,15 @@
+import os
+import pytest
+
+import testinfra.utils.ansible_runner
+
+testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
+    os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
+
+def test_conf_server_converted(host):
+    assert host.file('/media/atl/confluence/shared-home').is_directory
+    assert host.file('/media/atl/confluence/shared-home/shared-content.txt').is_file
+    assert host.file('/media/atl/confluence/shared-home/attachments').is_directory
+    assert host.file('/media/atl/confluence/shared-home/attachments/image.jpg').is_file
+
+    assert not host.file('/media/atl/confluence/shared-home/unwanted.txt').is_file
diff --git a/roles/restore_backups/tasks/home_restore.yml b/roles/restore_backups/tasks/home_restore.yml
index 4c5f2ad..5c0b7b4 100644
--- a/roles/restore_backups/tasks/home_restore.yml
+++ b/roles/restore_backups/tasks/home_restore.yml
@@ -18,6 +18,24 @@
       src: "{{ atl_backup_home_dest }}"
       remote_src: yes
       dest: "{{ atl_product_home_shared }}"
+      when: atl_backup_home_is_server is not defined or not atl_backup_home_is_server|bool
+
+    - name: Restore a Confluence server home to shared-home layout
+      unarchive:
+        src: "{{ atl_backup_home_dest }}"
+        remote_src: yes
+        dest: "{{ atl_product_home_shared }}"
+        # Use tar transform to convert the Confluence Server
+        # (unclustered) layout to shared-home version. What occurs is:
+        # * --transform runs first, moving attachments into the shared home.
+        # * --strip-components removes the top-level directory
+        # NOTE: Also see the `confluence_config` role, which uses
+        # symlinks to support server and clustered layouts
+        # concurrently.
+        extra_opts:
+          - "--transform=s,^attachments,shared-home/attachments,"
+          - "--strip-components=1"
+      when: atl_backup_home_is_server is defined and atl_backup_home_is_server|bool

     - name: Set shared home owner and group to application user
       file:
diff --git a/roles/restore_backups/tasks/main.yml b/roles/restore_backups/tasks/main.yml
index 1c81430..6ad9b98 100644
--- a/roles/restore_backups/tasks/main.yml
+++ b/roles/restore_backups/tasks/main.yml
@@ -58,6 +58,7 @@
     atl_backup_id: "{{ atl_backup_manifest.name }}"
     atl_backup_db_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.artifacts.db.location.location | basename }}"
     atl_backup_home_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.artifacts.sharedHome.location.location | basename }}"
+    atl_backup_home_is_server: "{{ atl_backup_manifest.artifacts.sharedHome.serverHome }}"

 # FIXME: Here we fetch the backups. However we may wish to stream
 # these directly from S3 to the target DB/FS to avoid requiring