Mirror of https://bitbucket.org/atlassian/dc-deployments-automation.git
Synced 2025-12-14 00:43:06 -06:00

Merged in DCD-1462-build-fix (pull request #157)

DCD-1462 build fix

Approved-by: Geoff Jacobs
Approved-by: Lee Goolsbee
Approved-by: Dylan Rathbone
@@ -36,7 +36,6 @@
     - role: product_common
     - role: product_install
     - role: database_init
-    - role: restore_backups
     - role: bitbucket_config
     - role: product_startup
    - role: bitbucket_dataset_restore
@@ -24,7 +24,6 @@
       tags: [skip_on_stack_update]
     - role: database_init
       tags: [skip_on_stack_update]
-    - role: restore_backups
     - role: confluence_common
     - role: confluence_config
     - role: product_startup
@@ -31,6 +31,5 @@
       tags: [skip_on_stack_update]
     - role: database_init
       tags: [skip_on_stack_update]
-    - role: restore_backups
     - role: jira_config
     - role: product_startup
@@ -18,7 +18,7 @@ pipelines:
     - step:
         name: Pre Parallelization stage
         script:
-          - echo "Running tests in 39 batches"
+          - echo "Running tests in 36 batches"

     - step:
         name: Check if the template is up-to-date
@@ -321,31 +321,6 @@ pipelines:
           - ./bin/install-ansible --dev
           - cd roles/product_startup
           - pipenv run molecule test -s synchrony
-    - step:
-        name: restore_backups/default
-        services:
-          - docker
-        script:
-          - ./bin/install-ansible --dev
-          - cd roles/restore_backups
-          - pipenv run molecule test -s default
-    - step:
-        name: restore_backups/restore_conf_server
-        services:
-          - docker
-        script:
-          - ./bin/install-ansible --dev
-          - cd roles/restore_backups
-          - pipenv run molecule test -s restore_conf_server
-    - step:
-        name: restore_backups/restore_jira_clustered
-        services:
-          - docker
-        script:
-          - ./bin/install-ansible --dev
-          - cd roles/restore_backups
-          - pipenv run molecule test -s restore_jira_clustered
-
     - step:
         name: Run Snyk security scan
         services:
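Each removed pipeline step above follows the repository's one-step-per-Molecule-scenario pattern. A minimal sketch of that shape, with placeholder role and scenario names:

    - step:
        name: some_role/some_scenario       # placeholder: role/scenario under test
        services:
          - docker                          # Molecule's Docker driver needs the service
        script:
          - ./bin/install-ansible --dev     # install pinned Ansible and dev deps via pipenv
          - cd roles/some_role
          - pipenv run molecule test -s some_scenario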
@@ -151,6 +151,3 @@ atl_rds_instance_class: "{{ lookup('env', 'ATL_RDS_INSTANCE_CLASS') }}"
 atl_rds_multi_az: "{{ lookup('env', 'ATL_RDS_MULTI_AZ') }}"
 atl_rds_subnet_group_name: "{{ lookup('env', 'ATL_RDS_SUBNET_GROUP_NAME') }}"
 atl_rds_security_group: "{{ lookup('env', 'ATL_RDS_SECURITY_GROUP') }}"
-
-atl_backup_manifest_url: "{{ lookup('env', 'ATL_BACKUP_MANIFEST_URL') }}"
-atl_restore_required: "{{ atl_backup_manifest_url is defined and atl_backup_manifest_url != '' }}"
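The removed atl_restore_required expression relied on lookup('env', ...) returning an empty string for unset variables, so the != '' comparison is what actually gated the restore. A small illustrative task (the variable name here is hypothetical):

    - name: Show that an unset environment variable renders as ''
      debug:
        msg: "{{ lookup('env', 'SOME_UNSET_VAR') == '' }}"   # prints true when SOME_UNSET_VAR is not exported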
@@ -14,11 +14,6 @@
   set_fact:
     ec2_autoscaling_group: "{{ ec2_instance_tags.tags['aws:autoscaling:groupName'] | default('') }}"

-# Because Ansible - https://github.com/ansible/ansible/issues/11905#issuecomment-130496173
-- name: Set fact to store boolean 'atl_restore_required' value as as array of one string
-  set_fact:
-    atl_restore_required_tag_value: ["{{ atl_restore_required | lower }}"]
-
 - block:
     # No existing timestamp, so this is a first run. Persist some metadata into the ASG.
     - name: Fetch the git revision for this repo
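The deleted task above worked around the templating quirk referenced in the linked Ansible issue: a boolean passed through a template can come back coerced to a string, so the role stashed its lowercase form in a one-element list and read it back with `.0`. A minimal sketch of the same trick, assuming a boolean var `my_flag`:

    - name: Preserve a boolean's lowercase string form
      set_fact:
        my_flag_tag_value: ["{{ my_flag | lower }}"]

    - debug:
        msg: "{{ my_flag_tag_value.0 }}"   # 'true' or 'false', usable as a tag value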
@@ -41,12 +36,6 @@
           Key: "atl:deployment:first-run-timestamp"
           Value: "TIMESTAMP: {{ ansible_date_time.iso8601 }}"

-        - ResourceType: "auto-scaling-group"
-          ResourceId: "{{ ec2_autoscaling_group }}"
-          PropagateAtLaunch: true
-          Key: "atl:deployment:is-stack-restored"
-          Value: "{{ atl_restore_required_tag_value.0 }}"
-

     # Set the tags on the ASG and the local instance. We need to
     # ignore errors as it's possible we don't have the permissions,
@@ -65,7 +54,6 @@
         tags:
           "atl:deployment:commit-id": "COMMIT: {{ git_out.stdout }}"
           "atl:deployment:first-run-timestamp": "TIMESTAMP: {{ ansible_date_time.iso8601 }}"
-          "atl:deployment:is-stack-restored": "{{ atl_restore_required_tag_value.0 }}"
       ignore_errors: true

       when:
@@ -1,12 +0,0 @@
-extends: default
-
-rules:
-  braces:
-    max-spaces-inside: 1
-    level: error
-  brackets:
-    max-spaces-inside: 1
-    level: error
-  line-length: disable
-  truthy: disable
-  trailing-spaces: false
@@ -1,4 +0,0 @@
----
-
-atl_backup_home_restore_canary_filename: ".slingshot_home_restore"
-atl_backup_home_restore_canary_path: "{{ atl_product_home_shared }}/{{ atl_backup_home_restore_canary_filename }}"
@@ -1,14 +0,0 @@
-# Molecule managed
-
-{% if item.registry is defined %}
-FROM {{ item.registry.url }}/{{ item.image }}
-{% else %}
-FROM {{ item.image }}
-{% endif %}
-
-RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
-    elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
-    elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
-    elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
-    elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \
-    elif [ $(command -v xbps-install) ]; then xbps-install -Syu && xbps-install -y python sudo bash ca-certificates && xbps-remove -O; fi
@@ -1,10 +0,0 @@
----
-- name: Converge
-  hosts: all
-  vars:
-    atl_backup_manifest_url: ''
-    atl_backup_home_restore_canary_path: '/tmp/canary.tmp'
-
-  roles:
-    # Should be no-op
-    - role: restore_backups
@@ -1,27 +0,0 @@
----
-dependency:
-  name: galaxy
-driver:
-  name: docker
-platforms:
-  - name: amazon_linux2
-    image: amazonlinux:2
-    groups:
-      - aws_node_local
-    ulimits:
-      - nofile:262144:262144
-  - name: ubuntu_lts
-    image: ubuntu:bionic
-    groups:
-      - aws_node_local
-    ulimits:
-      - nofile:262144:262144
-provisioner:
-  name: ansible
-  options:
-    skip-tags: runtime_pkg
-  inventory:
-    links:
-      group_vars: ../../../../group_vars/
-verifier:
-  name: testinfra
@@ -1,10 +0,0 @@
-import os
-
-import testinfra.utils.ansible_runner
-
-testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
-    os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
-
-
-def test_no_canary_file(host):
-    assert not host.file('atl_backup_home_restore_canary_path').exists
@@ -1,14 +0,0 @@
-# Molecule managed
-
-{% if item.registry is defined %}
-FROM {{ item.registry.url }}/{{ item.image }}
-{% else %}
-FROM {{ item.image }}
-{% endif %}
-
-RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
-    elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
-    elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
-    elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
-    elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \
-    elif [ $(command -v xbps-install) ]; then xbps-install -Syu && xbps-install -y python sudo bash ca-certificates && xbps-remove -O; fi
@@ -1,74 +0,0 @@
----
-- name: Converge
-  hosts: all
-  vars:
-    atl_backup_home_dest: "{{ test_archive }}"
-    atl_backup_id: 'test-backup'
-    atl_backup_manifest_url: 'fake_manifest'
-    atl_backup_home_is_server: 'true'
-
-    atl_product_home_shared: '/media/atl/confluence/shared-home'
-    atl_backup_home_restore_canary_path: "{{ atl_product_home_shared }}/canary.tmp"
-    atl_product_edition: 'confluence'
-    atl_product_user: 'confluence'
-    atl_product_user_uid: '2001'
-    atl_product_version_cache: "{{ atl_product_home_shared }}/{{ atl_product_edition }}.version"
-
-    test_archive: '/tmp/hello.tar.gz'
-    test_archive_file: 'hello.txt'
-    test_archive_source: '/tmp/hello'
-
-    test_pre_step_prefix: '[PRE-TEST]'
-    test_product_version_file: "/tmp/{{ atl_product_edition }}.version"
-
-  pre_tasks:
-    - name: "{{ test_pre_step_prefix }} Install tar and useradd/groupadd binaries"
-      package:
-        state: present
-        name:
-          - tar
-          - shadow-utils
-
-    - name: "{{ test_pre_step_prefix }} Create application group"
-      group:
-        name: "{{ atl_product_user }}"
-        gid: "{{ atl_product_user_uid }}"
-
-    - name: "{{ test_pre_step_prefix }} Create application user"
-      user:
-        name: "{{ atl_product_user }}"
-        uid: "{{ atl_product_user_uid }}"
-        group: "{{ atl_product_user }}"
-
-    - name: "{{ test_pre_step_prefix }} Create a Conf server home directory structure"
-      file:
-        path: "{{ item }}"
-        state: directory
-        mode: 0755
-      with_items:
-        - "{{ test_archive_source }}"
-        - "{{ test_archive_source }}/attachments"
-        - "{{ test_archive_source }}/shared-home"
-
-    - name: "{{ test_pre_step_prefix }} Create files"
-      copy:
-        dest: "{{ item }}"
-        content: "content"
-      with_items:
-        - "{{ test_archive_source }}/unwanted.txt"
-        - "{{ test_archive_source }}/attachments/image.jpg"
-        - "{{ test_archive_source }}/shared-home/shared-content.txt"
-
-    - name: "{{ test_pre_step_prefix }} Archive the shared home"
-      archive:
-        path:
-          - "{{ test_archive_source }}/*"
-        dest: "{{ test_archive }}"
-        owner: "{{ atl_product_user }}"
-
-  tasks:
-    - name: Install distro-specific restore support packages
-      include_tasks: "../../tasks/{{ ansible_distribution|lower }}.yml"
-
-    - name: Restore shared home
-      include_tasks: "../../tasks/home_restore.yml"
@@ -1,21 +0,0 @@
----
-dependency:
-  name: galaxy
-driver:
-  name: docker
-platforms:
-  - name: amazon_linux2
-    image: amazonlinux:2
-    groups:
-      - aws_node_local
-    ulimits:
-      - nofile:262144:262144
-provisioner:
-  name: ansible
-  options:
-    skip-tags: runtime_pkg
-  inventory:
-    links:
-      group_vars: ../../../../group_vars/
-verifier:
-  name: testinfra
@@ -1,15 +0,0 @@
-import os
-import pytest
-
-import testinfra.utils.ansible_runner
-
-testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
-    os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
-
-def test_conf_server_converted(host):
-    assert host.file('/media/atl/confluence/shared-home').is_directory
-    assert host.file('/media/atl/confluence/shared-home/shared-content.txt').is_file
-    assert host.file('/media/atl/confluence/shared-home/attachments').is_directory
-    assert host.file('/media/atl/confluence/shared-home/attachments/image.jpg').is_file
-
-    assert not host.file('/media/atl/confluence/shared-home/unwanted.txt').is_file
@@ -1,14 +0,0 @@
-# Molecule managed
-
-{% if item.registry is defined %}
-FROM {{ item.registry.url }}/{{ item.image }}
-{% else %}
-FROM {{ item.image }}
-{% endif %}
-
-RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
-    elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
-    elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
-    elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
-    elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \
-    elif [ $(command -v xbps-install) ]; then xbps-install -Syu && xbps-install -y python sudo bash ca-certificates && xbps-remove -O; fi
@@ -1,76 +0,0 @@
----
-- name: Converge
-  hosts: all
-  vars:
-    atl_backup_home_dest: "{{ test_archive }}"
-    atl_backup_home_restore_canary_path: '/tmp/canary.tmp'
-    atl_backup_id: 'test-backup'
-    atl_backup_manifest_url: 'fake_manifest'
-    atl_backup_home_is_server: 'false'
-
-    atl_product_edition: 'jira-software'
-    atl_product_home_shared: '/media/atl/jira/shared'
-    atl_product_user: 'jira'
-    atl_product_user_uid: '2001'
-    atl_product_version_cache: "{{ atl_product_home_shared }}/{{ atl_product_edition }}.version"
-
-    test_archive: '/tmp/hello.tar.gz'
-    test_archive_file: 'hello.txt'
-    test_archive_source: '/tmp/hello'
-    test_pre_step_prefix: '[PRE-TEST]'
-    test_product_version_file: "/tmp/{{ atl_product_edition }}.version"
-
-  pre_tasks:
-    - name: "{{ test_pre_step_prefix }} Install tar"
-      package:
-        state: present
-        name: tar
-
-    - name: "{{ test_pre_step_prefix }} Install useradd and groupadd binaries"
-      package:
-        state: present
-        name: shadow-utils
-
-    - name: "{{ test_pre_step_prefix }} Create application group"
-      group:
-        name: "{{ atl_product_user }}"
-        gid: "{{ atl_product_user_uid }}"
-
-    - name: "{{ test_pre_step_prefix }} Create application user"
-      user:
-        name: "{{ atl_product_user }}"
-        uid: "{{ atl_product_user_uid }}"
-        group: "{{ atl_product_user }}"
-
-    - block:
-        - name: "{{ test_pre_step_prefix }} Create a directory for the shared home archive"
-          file:
-            path: "{{ test_archive_source }}"
-            state: directory
-            mode: 0755
-        - name: "{{ test_pre_step_prefix }} Create a file in the shared home"
-          lineinfile:
-            create: yes
-            line: 'Hello, world!'
-            path: "{{ test_archive_source }}/{{ test_archive_file }}"
-            mode: 0640
-        - name: "{{ test_pre_step_prefix }} Create the version file in the shared home"
-          lineinfile:
-            create: yes
-            line: '8.5'
-            path: "{{ test_product_version_file }}"
-            mode: 0640
-        - name: "{{ test_pre_step_prefix }} Archive the shared home"
-          archive:
-            path:
-              - "{{ test_archive_source }}"
-              - "{{ test_product_version_file }}"
-            dest: "{{ test_archive }}"
-            owner: "{{ atl_product_user }}"
-
-  tasks:
-    - name: Install distro-specific restore support packages
-      include_tasks: "../../tasks/{{ ansible_distribution|lower }}.yml"
-
-    - name: Restore shared home
-      include_tasks: "../../tasks/home_restore.yml"
@@ -1,21 +0,0 @@
----
-dependency:
-  name: galaxy
-driver:
-  name: docker
-platforms:
-  - name: amazon_linux2
-    image: amazonlinux:2
-    groups:
-      - aws_node_local
-    ulimits:
-      - nofile:262144:262144
-provisioner:
-  name: ansible
-  options:
-    skip-tags: runtime_pkg
-  inventory:
-    links:
-      group_vars: ../../../../group_vars/
-verifier:
-  name: testinfra
@@ -1,39 +0,0 @@
-import os
-import pytest
-
-import testinfra.utils.ansible_runner
-
-testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
-    os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
-
-
-@pytest.mark.parametrize('exe', [
-    '/usr/bin/pg_dump',
-    '/usr/bin/pg_restore',
-    '/usr/bin/psql'
-])
-def test_postgresql_amazon_linux_extras_exes(host, exe):
-    assert host.file(exe).exists
-
-def test_postgresql_version(host):
-    pg_dump_version_output = host.check_output('pg_dump --version')
-    assert '(PostgreSQL) 9.6' in pg_dump_version_output
-
-@pytest.mark.parametrize('file', [
-    '/media/atl/jira/shared',
-    '/media/atl/jira/shared/hello',
-    '/media/atl/jira/shared/hello/hello.txt'
-])
-def test_shared_home_owner(host, file):
-    assert host.file(file).exists
-    assert host.file(file).user == 'jira'
-    assert host.file(file).group == 'jira'
-
-def test_file_modes(host):
-    assert host.file('/media/atl/jira/shared/hello').mode == 0o755
-    assert host.file('/media/atl/jira/shared/hello/hello.txt').mode == 0o640
-
-def test_version_file_owned_by_root(host):
-    assert host.file('/media/atl/jira/shared/jira-software.version').exists
-    assert host.file('/media/atl/jira/shared/jira-software.version').user == 'root'
-    assert host.file('/media/atl/jira/shared/jira-software.version').group == 'root'
@@ -1,9 +0,0 @@
----
-
-# Amazon Linux 2 supplies extra packages via a special command.
-- name: Enable Postgresql from 'extras'
-  command: amazon-linux-extras install -y "postgresql{{ postgres_version }}"
-  args:
-    creates: /usr/bin/psql
-  environment:
-    PYTHON: /bin/python
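The `creates:` argument is what kept the raw `command` above idempotent: Ansible skips the task when the named file already exists, so `amazon-linux-extras` only ran on the first pass. The same guard in a generic, hedged form (the installer path here is hypothetical):

    - name: Run a one-shot installer only if its output is missing
      command: /usr/local/bin/install-something --yes   # hypothetical installer
      args:
        creates: /usr/local/bin/something               # skip when this file already exists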
@@ -1,75 +0,0 @@
----
-- name: Check for the restore canary file
-  stat:
-    path: "{{ atl_backup_home_restore_canary_path }}"
-  register: restore_canary
-
-- block:
-    - name: Create shared home if necessary
-      file:
-        path: "{{ atl_product_home_shared }}"
-        state: directory
-        mode: 0750
-        owner: "{{ atl_product_user }}"
-        group: "{{ atl_product_user }}"
-
-    # We also need to use `tar` here as `unarchive` runs `tar` three times doing
-    # idempotence checks, which we can skip.
-    - name: Restore the shared-home backup
-      command:
-        argv:
-          - "tar"
-          - "--extract"
-          - "--file"
-          - "{{ atl_backup_home_dest }}"
-          - "--directory"
-          - "{{ atl_product_home_shared }}"
-        warn: false
-      when: atl_backup_home_is_server is not defined or not atl_backup_home_is_server|bool
-
-    # Use tar transform to convert the Confluence Server (unclustered)
-    # layout to shared-home version. What occurs is:
-    #
-    # * --transform runs first, moving attachments into the shared home.
-    # * --strip-components removes the top-level directory
-    #
-    # NOTE: Also see the `confluence_config` role, which uses
-    # symlinks to support server and clustered layouts
-    # concurrently.
-    - name: Restore a Confluence server home to share-home layout
-      command:
-        argv:
-          - "tar"
-          - "--extract"
-          - "--transform=s,^attachments,shared-home/attachments,"
-          - "--strip-components=1"
-          - "--file"
-          - "{{ atl_backup_home_dest }}"
-          - "--directory"
-          - "{{ atl_product_home_shared }}"
-        warn: false
-      when: atl_backup_home_is_server is defined and atl_backup_home_is_server|bool
-
-    - name: Set shared home owner and group to application user
-      file:
-        path: "{{ atl_product_home_shared }}"
-        recurse: yes
-        group: "{{ atl_product_user }}"
-        owner: "{{ atl_product_user }}"
-        state: directory
-
-    - name: Set version file owner and group to root
-      file:
-        path: "{{ atl_product_version_cache }}"
-        group: root
-        owner: root
-        state: file
-      # Ignore the error in case there is no product version file in the backup
-      ignore_errors: yes
-
-    - name: Create restore-canary if necessary
-      copy:
-        dest: "{{ atl_backup_home_restore_canary_path }}"
-        content: "{{ atl_backup_id }}"
-
-  when: not restore_canary.stat.exists
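As the comments in the deleted tasks note, raw tar was chosen over the `unarchive` module to skip the module's repeated idempotence runs. For contrast, a sketch of what the simpler `unarchive` variant of the first restore task would have looked like, using the same role variables:

    - name: Restore the shared-home backup (unarchive variant)
      unarchive:
        src: "{{ atl_backup_home_dest }}"
        dest: "{{ atl_product_home_shared }}"
        remote_src: yes   # the archive is already on the target host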
@@ -1,118 +0,0 @@
----
-
-# This role will attempt to fetch and load the backup manifest from a
-# remote S3 URL. On successful completion the contents of JSON or YAML
-# document will be in the var `atl_backup_manifest`.
-#
-# PREREQUISITES:
-# * `atl_backup_manifest_url` points at the manifest.
-# * The shared home filesystem is mounted if necessary (e.g. NFS/EFS).
-# * The database has been created and the variable `db_created` is
-#   registered with the result (i.e: `register: db_created`).
-#
-# NOTE: The actual DB/FS restore operations could potentially be split
-# out into discrete roles, but currently that is not required.
-#
-# TODO: Support HTTPS with authentication. Deferred until after the
-# initial testing release.
-
-- block:
-
-    - name: Ensure temp directory is present
-      file:
-        path: "{{ atl_installer_temp }}"
-        state: directory
-        mode: 0750
-        owner: "{{ atl_product_user }}"
-        group: "{{ atl_product_user }}"
-      changed_when: false # For Molecule idempotence check
-
-    - name: Parse the manifest URL
-      set_fact:
-        atl_backup_manifest_url: "{{ atl_backup_manifest_url | urlsplit }}"
-
-    - name: Extract manifest file information
-      set_fact:
-        atl_backup_manifest_bucket: "{{ atl_backup_manifest_url.hostname }}"
-        atl_backup_manifest_path: "{{ atl_backup_manifest_url.path }}"
-        atl_backup_manifest_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest_url.path | basename }}"
-
-    - name: Fetch the manifest from S3
-      aws_s3:
-        mode: get
-        overwrite: different
-        bucket: "{{ atl_backup_manifest_bucket }}"
-        object: "{{ atl_backup_manifest_path }}"
-        dest: "{{ atl_backup_manifest_dest }}"
-      when: atl_backup_manifest_url.scheme == 's3'
-
-    - name: Load parameters from manifest
-      include_vars:
-        file: "{{ atl_backup_manifest_dest }}"
-        name: atl_backup_manifest
-
-    - name: Define the DB and home dump destinations
-      set_fact:
-        # FIXME: The manifest format is still undecided so the
-        # following usages will need to be updated once it settles..
-        atl_backup_id: "{{ atl_backup_manifest.name }}"
-        atl_backup_db_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.artifacts.db.location.location | basename }}"
-        atl_backup_home_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.artifacts.sharedHome.location.location | basename }}"
-        atl_backup_home_is_server: "{{ atl_backup_manifest.artifacts.sharedHome.serverHome | default(false, true) | bool }}"
-
-    # FIXME: Here we fetch the backups. However we may wish to stream
-    # these directly from S3 to the target DB/FS to avoid requiring
-    # disk-space for the intermediate files.
-    - name: Fetch DB backup from S3
-      aws_s3:
-        mode: get
-        overwrite: different
-        bucket: "{{ atl_backup_manifest.artifacts.db.location.location | urlsplit('hostname') }}"
-        object: "{{ atl_backup_manifest.artifacts.db.location.location | urlsplit('path') }}"
-        # We save the backup as a .tar file so that the postgresql_db module uses pg_restore instead of psql to do restore
-        # This can be removed when ansible 2.10 is released
-        dest: "{{ atl_backup_db_dest }}.tar"
-
-    - name: Fetch Home backup from S3
-      aws_s3:
-        mode: get
-        overwrite: different
-        bucket: "{{ atl_backup_manifest.artifacts.sharedHome.location.location | urlsplit('hostname') }}"
-        object: "{{ atl_backup_manifest.artifacts.sharedHome.location.location | urlsplit('path') }}"
-        dest: "{{ atl_backup_home_dest }}"
-
-    - name: Install distro-specific restore support packages
-      include_tasks: "{{ ansible_distribution|lower }}.yml"
-
-
-    # Restores the application database. If a var with name `atl_force_db_restore` is set to true, the database will be restored even when the database has not been created in the same playbook run.
-    # This is done to accommodate running the restore role independent of the database_init role.
-    - name: Restore application database
-      postgresql_db:
-        login_host: "{{ atl_db_host }}"
-        login_user: "{{ atl_db_root_user }}"
-        login_password: "{{ atl_db_root_password }}"
-        port: "{{ atl_db_port }}"
-        name: "{{ atl_jdbc_db_name }}"
-        owner: "{{ atl_jdbc_user }}"
-        encoding: "{{ atl_jdbc_encoding }}"
-        lc_collate: "{{ atl_jdbc_collation }}"
-        lc_ctype: "{{ atl_jdbc_ctype }}"
-        template: "{{ atl_jdbc_template }}"
-        # Depends on fetch_backup roles
-        state: restore
-        target: "{{ atl_backup_db_dest }}.tar"
-        target_opts: "-Fc"
-      register: result
-      # managed DBs in cloud providers are not allowing full root access to the DB engine, we can safely ignore the COMMENT ON EXTENSION error
-      failed_when:
-        - result.rc != 0
-        - '"COMMENT ON EXTENSION" not in result.msg'
-      # default('false', true) filter makes the default filter return the specified default value for python False-y values (like an empty string)
-      when: atl_backup_db_dest is defined and (db_created.changed or (atl_force_db_restore | default('false', true) | bool))
-
-    - name: Restore shared home
-      include_tasks: "home_restore.yml"
-
-
-  when: atl_restore_required
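The deleted role leaned on Ansible's `urlsplit` filter to turn an S3 URL into bucket and key parts, as seen in the fetch tasks above. A standalone illustration with a made-up URL:

    - name: Split an S3 URL into bucket and key (illustrative values)
      set_fact:
        manifest_parts: "{{ 's3://my-bucket/backups/manifest.yaml' | urlsplit }}"

    - debug:
        msg: "bucket={{ manifest_parts.hostname }} key={{ manifest_parts.path }}"
        # -> bucket=my-bucket key=/backups/manifest.yaml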