Merge branch 'master' into feature/ITOPSENG-277-itops-required-changes-for-confluence
@@ -5,16 +5,19 @@
    src: server.xml.j2
    dest: "{{ atl_product_installation_versioned }}/apache-tomcat/conf/server.xml"

- name: Override JVM memory settings.
  # Ugly but necessary as the product installs this file so we need to make the change here.
- name: Set the minimum heap size (Xms)
  lineinfile:
    path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh"
    backrefs: true
    regexp: "^{{ item }}="
    line: "{{ item }}=\"{{ atl_jvm_heap }}\""
  with_items:
    - 'JVM_MINIMUM_MEMORY'
    - 'JVM_MAXIMUM_MEMORY'
    regexp: '^(.*)Xms(\d+\w)(\s.*)$'
    line: '\1Xms{{ atl_jvm_heap }}\3'
    backrefs: yes

- name: Set the maximum heap size (Xmx)
  lineinfile:
    path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh"
    regexp: '^(.*)Xmx(\d+\w)(\s.*)$'
    line: '\1Xmx{{ atl_jvm_heap }}\3'
    backrefs: yes
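The Xms/Xmx tasks above boil down to a backreference-preserving regex substitution on setenv.sh. A minimal Python sketch of the same substitution, for illustration only (the sample CATALINA_OPTS line and the 2048m heap value are made up, not part of this change):

import re

atl_jvm_heap = "2048m"  # example value; the real value comes from Ansible vars

# A hypothetical line as it might appear in apache-tomcat/bin/setenv.sh
line = 'CATALINA_OPTS="-Xms512m -Xmx512m ${CATALINA_OPTS}"'

# Same patterns as the lineinfile tasks: keep everything around the heap size, swap the size itself
line = re.sub(r'^(.*)Xms(\d+\w)(\s.*)$', r'\g<1>Xms' + atl_jvm_heap + r'\g<3>', line)
line = re.sub(r'^(.*)Xmx(\d+\w)(\s.*)$', r'\g<1>Xmx' + atl_jvm_heap + r'\g<3>', line)

print(line)  # CATALINA_OPTS="-Xms2048m -Xmx2048m ${CATALINA_OPTS}"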
- name: Set Crowd home directory in crowd-init.properties file
  lineinfile:
@@ -27,7 +30,13 @@
    insertafter: "EOF"
    line: 'export CATALINA_OPTS="${CATALINA_OPTS} {{ atl_catalina_opts }} {{ atl_catalina_opts_extra }}"'

- name: Set JAVA_HOME #FIXME
- name: Set the Crowd node name via CATALINA_OPTS
  lineinfile:
    path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh"
    insertafter: "EOF"
    line: export CATALINA_OPTS="${CATALINA_OPTS} -Dcluster.node.name={{ ansible_ec2_instance_id }}-{{ ansible_ec2_local_ipv4 }}"

- name: Set JAVA_HOME
  lineinfile:
    path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh"
    insertafter: "EOF"
@@ -1,40 +1,40 @@
---

- name: Create application DB user
  postgresql_user:
    login_host: "{{ atl_db_host }}"
    login_user: "{{ atl_db_root_user }}"
    login_password: "{{ atl_db_root_password }}"
    port: "{{ atl_db_port }}"
    name: "{{ atl_jdbc_user }}"
    password: "{{ atl_jdbc_password }}"
    expires: 'infinity'
  tags:
    - new_only
- block:

- name: Update root privs for new user
  postgresql_privs:
    login_host: "{{ atl_db_host }}"
    login_user: "{{ atl_db_root_user }}"
    login_password: "{{ atl_db_root_password }}"
    database: postgres
    roles: "{{ atl_db_root_user }}"
    objs: "{{ atl_jdbc_user }}"
    type: group
  tags:
    - new_only
- name: Create application DB user
  postgresql_user:
    login_host: "{{ atl_db_host }}"
    login_user: "{{ atl_db_root_user }}"
    login_password: "{{ atl_db_root_password }}"
    port: "{{ atl_db_port }}"
    name: "{{ atl_jdbc_user }}"
    password: "{{ atl_jdbc_password }}"
    expires: 'infinity'

- name: Update root privs for new user
  postgresql_privs:
    login_host: "{{ atl_db_host }}"
    login_user: "{{ atl_db_root_user }}"
    login_password: "{{ atl_db_root_password }}"
    database: postgres
    roles: "{{ atl_db_root_user }}"
    objs: "{{ atl_jdbc_user }}"
    type: group

- name: Create new application database
  postgresql_db:
    login_host: "{{ atl_db_host }}"
    login_user: "{{ atl_db_root_user }}"
    login_password: "{{ atl_db_root_password }}"
    port: "{{ atl_db_port }}"
    name: "{{ atl_jdbc_db_name }}"
    owner: "{{ atl_jdbc_user }}"
    encoding: "{{ atl_jdbc_encoding }}"
    lc_collate: "{{ atl_jdbc_collation }}"
    lc_ctype: "{{ atl_jdbc_ctype }}"
    template: "{{ atl_jdbc_template }}"
  register: db_created

- name: Create application database
  postgresql_db:
    login_host: "{{ atl_db_host }}"
    login_user: "{{ atl_db_root_user }}"
    login_password: "{{ atl_db_root_password }}"
    port: "{{ atl_db_port }}"
    name: "{{ atl_jdbc_db_name }}"
    owner: "{{ atl_jdbc_user }}"
    encoding: "{{ atl_jdbc_encoding }}"
    lc_collate: "{{ atl_jdbc_collation }}"
    lc_ctype: "{{ atl_jdbc_ctype }}"
    template: "{{ atl_jdbc_template }}"
  tags:
    - new_only
@@ -1,69 +0,0 @@
---

# This role will attempt to fetch and load the backup manifest from a
# remote HTTP or S3 URL. On successful completion the contents of JSON
# or YAML document will be in the var `atl_backup_manifest`.

- block:

  - name: Ensure temp directory is present
    file:
      path: "{{ atl_installer_temp }}"
      state: directory
      mode: 0750
      owner: "{{ atl_product_user }}"
      group: "{{ atl_product_user }}"
    changed_when: false  # For Molecule idempotence check

  - name: Parse the manifest URL
    set_fact:
      atl_backup_manifest_url: "{{ atl_backup_manifest_url | urlsplit }}"

  - name: Extract manifest file information
    set_fact:
      atl_backup_manifest_bucket: "{{ atl_backup_manifest_url.hostname }}"
      atl_backup_manifest_path: "{{ atl_backup_manifest_url.path }}"
      atl_backup_manifest_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest_url.path | basename }}"

  - name: Fetch the manifest from S3
    aws_s3:
      mode: get
      bucket: "{{ atl_backup_manifest_bucket }}"
      object: "{{ atl_backup_manifest_path }}"
      dest: "{{ atl_backup_manifest_dest }}"
    when: atl_backup_manifest_url.scheme == 's3'

  - name: Fetch the manifest from remote host
    get_url:
      url: "{{ atl_backup_manifest_url }}"
      dest: "{{ atl_backup_manifest_dest }}"
    when: atl_backup_manifest_url.scheme != 's3'

  - name: Load parameters from manifest
    include_vars:
      file: "{{ atl_backup_manifest_dest }}"
      name: atl_backup_manifest

  - name: Define the DB and home dump destinations
    set_fact:
      atl_backup_db_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.db_dump | basename }}"
      atl_backup_home_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.shared_home_dump | basename }}"

  # FIXME: Here we fetch the backups. However we may wish to stream
  # these directly from S3 to the target DB/FS to avoid requiring
  # disk-space for the intermediate files.
  - name: Fetch DB backup from S3
    aws_s3:
      mode: get
      bucket: "{{ atl_backup_manifest.db_dump | urlsplit('hostname') }}"
      object: "{{ atl_backup_manifest.db_dump | urlsplit('path') }}"
      dest: "{{ atl_backup_db_dest }}"

  - name: Fetch Home backup from S3
    aws_s3:
      mode: get
      bucket: "{{ atl_backup_manifest.shared_home_dump | urlsplit('hostname') }}"
      object: "{{ atl_backup_manifest.shared_home_dump | urlsplit('path') }}"
      dest: "{{ atl_backup_home_dest }}"

  when: atl_backup_manifest_url is defined and atl_backup_manifest_url != ''
@@ -19,10 +19,16 @@ atl_product_base_url: "{{ atl_release_base_url }}/{{ atl_product_family }}/downl
atl_product_download_url: "{{ atl_product_base_url }}/atlassian-{{ atl_download_edition | default(atl_product_edition) }}-{{ atl_product_version }}{{ atl_download_suffix }}"

atl_product_download_filename: "{{ atl_download_edition | default(atl_product_edition) }}.{{ atl_product_version }}{{ atl_download_suffix }}"
atl_product_download: "{{ atl_installer_temp }}/{{ atl_product_download_filename }}"
atl_product_temp_download: "{{ atl_installer_temp }}/{{ atl_product_download_filename }}"
atl_product_varfile: "{{ atl_installer_temp }}/{{ atl_product_family }}.varfile"

atl_product_home_shared_download_dir: "{{ atl_shared_mountpoint }}/downloads"
atl_product_home_shared_download: "{{ atl_product_home_shared_download_dir }}/{{ atl_product_download_filename }}"
atl_product_home_shared_moving_lock: "{{ atl_product_home_shared_download }}_moving"
atl_product_home_shared_completed_lock: "{{ atl_product_home_shared_download }}_completed"

atl_marketplace_base: "https://marketplace.atlassian.com"
atl_mpac_products: "https://marketplace.atlassian.com/rest/2/products"
atl_servicedesk_latest_url: "https://marketplace.atlassian.com/rest/2/products/key/jira-servicedesk/versions/latest"
atl_servicedesk_versioned_url: "https://marketplace.atlassian.com/rest/2/products/key/jira-servicedesk/versions/name/{{ atl_product_version }}"
atl_servicedesk_url_map:
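The Marketplace endpoints defined above are what the Molecule tests further down use to resolve the newest release of a product. A minimal sketch of that lookup, illustration only and not part of this commit (the application key is just an example):

import json
from six.moves import urllib

application = "jira"  # example key; the tests below use bitbucket, confluence, crowd and jira
url = "https://marketplace.atlassian.com/rest/2/applications/%s/versions/latest" % application

upstream_fd = urllib.request.urlopen(url)
upstream_json = json.load(upstream_fd)
print(upstream_json['version'])  # the version string the tests append to the installer filename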
@@ -37,6 +37,16 @@ def test_latest_is_downloaded(host):
    upstream_json = json.load(upstream_fd)
    upstream = upstream_json['version']

    installer = host.file('/opt/atlassian/tmp/bitbucket.' + upstream + '-x64.bin')
    installer = host.file('/media/atl/downloads/bitbucket.' + upstream + '-x64.bin')
    assert installer.exists
    assert installer.user == 'root'


def test_completed_lockfile(host):
    upstream_fd = urllib.request.urlopen(
        "https://marketplace.atlassian.com/rest/2/applications/bitbucket/versions/latest")
    upstream_json = json.load(upstream_fd)
    upstream = upstream_json['version']

    lockfile = host.file('/media/atl/downloads/bitbucket.' + upstream + '-x64.bin_completed')
    assert lockfile.exists
    assert lockfile.user == 'root'

@@ -35,6 +35,15 @@ def test_latest_is_downloaded(host):
    upstream_json = json.load(upstream_fd)
    upstream = upstream_json['version']

    installer = host.file('/opt/atlassian/tmp/confluence.'+upstream+'-x64.bin')
    installer = host.file('/media/atl/downloads/confluence.'+upstream+'-x64.bin')
    assert installer.exists
    assert installer.user == 'root'


def test_completed_lockfile(host):
    upstream_fd = urllib.request.urlopen("https://marketplace.atlassian.com/rest/2/applications/confluence/versions/latest")
    upstream_json = json.load(upstream_fd)
    upstream = upstream_json['version']

    lockfile = host.file('/media/atl/downloads/confluence.'+upstream+'-x64.bin_completed')
    assert lockfile.exists
    assert lockfile.user == 'root'
roles/product_install/molecule/crowd_latest/Dockerfile.j2 (new file, 14 lines)
@@ -0,0 +1,14 @@
# Molecule managed

{% if item.registry is defined %}
FROM {{ item.registry.url }}/{{ item.image }}
{% else %}
FROM {{ item.image }}
{% endif %}

RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
    elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
    elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
    elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
    elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \
    elif [ $(command -v xbps-install) ]; then xbps-install -Syu && xbps-install -y python sudo bash ca-certificates && xbps-remove -O; fi

roles/product_install/molecule/crowd_latest/molecule.yml (new file, 30 lines)
@@ -0,0 +1,30 @@
---
dependency:
  name: galaxy
driver:
  name: docker
lint:
  name: yamllint
platforms:
  - name: amazon_linux2
    image: amazonlinux:2
    groups:
      - aws_node_local
  - name: ubuntu_lts
    image: ubuntu:bionic
    groups:
      - aws_node_local
provisioner:
  name: ansible
  options:
    skip-tags: runtime_pkg
  lint:
    name: ansible-lint
  inventory:
    links:
      group_vars: ../../../../group_vars/
verifier:
  name: testinfra
  lint:
    name: flake8
  enabled: false
roles/product_install/molecule/crowd_latest/playbook.yml (new file, 12 lines)
@@ -0,0 +1,12 @@
---
- name: Converge
  hosts: all
  vars:
    atl_product_family: "crowd"
    atl_product_edition: "crowd"
    atl_product_user: "crowd"
    atl_download_format: "tarball"
  roles:
    - role: linux_common
    - role: product_common
    - role: product_install

@@ -0,0 +1,52 @@
import os
from six.moves import urllib
import json

import testinfra.utils.ansible_runner

testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
    os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')


def test_version_downloaded(host):
    verfile = host.file('/media/atl/crowd/shared/crowd.version')
    assert verfile.exists


def test_symlink_created(host):
    target = host.file('/opt/atlassian/crowd/current')
    assert target.exists
    assert target.is_symlink


def test_unpacked(host):
    verfile = host.file('/opt/atlassian/crowd/current/start_crowd.sh')
    assert verfile.exists


def test_version_file_is_latest(host):
    verfile = host.file('/media/atl/crowd/shared/crowd.version')
    assert verfile.exists

    upstream_fd = urllib.request.urlopen(
        "https://marketplace.atlassian.com/rest/2/applications/crowd/versions/latest")
    upstream_json = json.load(upstream_fd)
    upstream = upstream_json['version']

    assert verfile.content.decode("UTF-8").strip() == upstream.strip()


def test_latest_is_downloaded(host):
    upstream_fd = urllib.request.urlopen(
        "https://marketplace.atlassian.com/rest/2/applications/crowd/versions/latest")
    upstream_json = json.load(upstream_fd)
    upstream = upstream_json['version']

    installer = host.file('/media/atl/downloads/crowd.' + upstream + '.tar.gz')
    assert installer.exists
    assert installer.user == 'root'


def test_completed_lockfile(host):
    upstream_fd = urllib.request.urlopen(
        "https://marketplace.atlassian.com/rest/2/applications/crowd/versions/latest")
    upstream_json = json.load(upstream_fd)
    upstream = upstream_json['version']

    lockfile = host.file('/media/atl/downloads/crowd.' + upstream + '.tar.gz_completed')
    assert lockfile.exists
    assert lockfile.user == 'root'
@@ -23,6 +23,15 @@ def test_latest_is_downloaded(host):
    upstream_json = json.load(upstream_fd)
    upstream = upstream_json['version']

    installer = host.file('/opt/atlassian/tmp/jira-core.'+upstream+'-x64.bin')
    installer = host.file('/media/atl/downloads/jira-core.'+upstream+'-x64.bin')
    assert installer.exists
    assert installer.user == 'root'


def test_completed_lockfile(host):
    upstream_fd = urllib.request.urlopen("https://marketplace.atlassian.com/rest/2/applications/jira/versions/latest")
    upstream_json = json.load(upstream_fd)
    upstream = upstream_json['version']

    lockfile = host.file('/media/atl/downloads/jira-core.'+upstream+'-x64.bin_completed')
    assert lockfile.exists
    assert lockfile.user == 'root'

roles/product_install/molecule/jira_all/Dockerfile.j2 (new file, 14 lines)
@@ -0,0 +1,14 @@
# Molecule managed

{% if item.registry is defined %}
FROM {{ item.registry.url }}/{{ item.image }}
{% else %}
FROM {{ item.image }}
{% endif %}

RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
    elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
    elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
    elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
    elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \
    elif [ $(command -v xbps-install) ]; then xbps-install -Syu && xbps-install -y python sudo bash ca-certificates && xbps-remove -O; fi

roles/product_install/molecule/jira_all/molecule.yml (new file, 30 lines)
@@ -0,0 +1,30 @@
---
dependency:
  name: galaxy
driver:
  name: docker
lint:
  name: yamllint
platforms:
  - name: amazon_linux2
    image: amazonlinux:2
    groups:
      - aws_node_local
  - name: ubuntu_lts
    image: ubuntu:bionic
    groups:
      - aws_node_local
provisioner:
  name: ansible
  options:
    skip-tags: runtime_pkg
  lint:
    name: ansible-lint
  inventory:
    links:
      group_vars: ../../../../group_vars/
verifier:
  name: testinfra
  lint:
    name: flake8
  enabled: false

roles/product_install/molecule/jira_all/playbook.yml (new file, 28 lines)
@@ -0,0 +1,28 @@
---
- name: Converge
  hosts: all
  vars:
    atl_product_family: "jira"
    atl_product_edition: "jira-software"
    atl_product_user: "jira"
    atl_product_version: "7.13.1"
    atl_install_jsd_as_obr: true
    atl_systemd_service_name: "jira.service"
    atl_jdbc_encoding: 'UNICODE'
    atl_jdbc_collation: 'C'
    atl_jdbc_ctype: 'C'
    atl_jdbc_template: 'template0'
  pre_tasks:
    - name: Create cache dir
      file:
        path: '/media/atl/jira/shared/'
        state: directory
    - name: Seed version
      copy:
        dest: '/media/atl/jira/shared/jira-core.version'
        content: "7.13.1"
        force: false  # For idempotency check
  roles:
    - role: linux_common
    - role: product_common
    - role: product_install

@@ -0,0 +1,48 @@
import os
from six.moves import urllib

import testinfra.utils.ansible_runner
import json

testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
    os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')


def test_version_is_correct(host):
    verfile = host.file('/media/atl/jira/shared/jira-software.version')
    assert verfile.exists

    assert verfile.content.decode("UTF-8").strip() == "7.13.1"


def test_is_downloaded(host):
    installer = host.file('/media/atl/downloads/jira-software.7.13.1-x64.bin')
    assert installer.exists
    assert installer.user == 'root'


def test_completed_lockfile(host):
    lockfile = host.file('/media/atl/downloads/jira-software.7.13.1-x64.bin_completed')
    assert lockfile.exists
    assert lockfile.user == 'root'


def test_is_unpacked(host):
    installer = host.file('/opt/atlassian/jira-software/7.13.1/atlassian-jira/')
    assert installer.exists
    assert installer.is_directory
    assert installer.user == 'jira'
    assert installer.mode == 0o0755


def test_obr_is_downloaded(host):
    installer = host.file('/media/atl/downloads/jira-servicedesk-application-3.16.1.obr')
    assert installer.exists
    assert installer.user == 'root'


def test_obr_completed_lockfile(host):
    lockfile = host.file('/media/atl/downloads/jira-servicedesk-application-3.16.1.obr_completed')
    assert lockfile.exists
    assert lockfile.user == 'root'


def test_obr_is_unpacked(host):
    jsdjar = host.file('/media/atl/jira/shared/plugins/installed-plugins/jira-servicedesk-application-3.16.1.jar')
    assert jsdjar.exists
    assert jsdjar.user == 'jira'
    assert jsdjar.mode == 0o0750
@@ -14,10 +14,15 @@ def test_version_is_correct(host):
    assert verfile.content.decode("UTF-8").strip() == "7.10.2"


def test_is_downloaded(host):
    installer = host.file('/opt/atlassian/tmp/jira-core.7.10.2-x64.bin')
    installer = host.file('/media/atl/downloads/jira-core.7.10.2-x64.bin')
    assert installer.exists
    assert installer.user == 'root'


def test_completed_lockfile(host):
    lockfile = host.file('/media/atl/downloads/jira-core.7.10.2-x64.bin_completed')
    assert lockfile.exists
    assert lockfile.user == 'root'


def test_is_unpacked(host):
    installer = host.file('/opt/atlassian/jira-core/7.10.2/atlassian-jira/')
    assert installer.exists

@@ -14,10 +14,15 @@ def test_version_is_correct(host):
    assert verfile.content.decode("UTF-8").strip() == "7.10.1"


def test_is_downloaded(host):
    installer = host.file('/opt/atlassian/tmp/jira-core.7.10.1-x64.bin')
    installer = host.file('/media/atl/downloads/jira-core.7.10.1-x64.bin')
    assert installer.exists
    assert installer.user == 'root'


def test_completed_lockfile(host):
    lockfile = host.file('/media/atl/downloads/jira-core.7.10.1-x64.bin_completed')
    assert lockfile.exists
    assert lockfile.user == 'root'


def test_is_unpacked(host):
    installer = host.file('/opt/atlassian/jira-core/7.10.1/atlassian-jira/')
    assert installer.exists
@@ -35,6 +35,15 @@ def test_latest_is_downloaded(host):
    upstream_json = json.load(upstream_fd)
    upstream = upstream_json['version']

    installer = host.file('/opt/atlassian/tmp/jira-software.'+upstream+'-x64.bin')
    installer = host.file('/media/atl/downloads/jira-software.'+upstream+'-x64.bin')
    assert installer.exists
    assert installer.user == 'root'


def test_completed_lockfile(host):
    upstream_fd = urllib.request.urlopen("https://marketplace.atlassian.com/rest/2/applications/jira/versions/latest")
    upstream_json = json.load(upstream_fd)
    upstream = upstream_json['version']

    lockfile = host.file('/media/atl/downloads/jira-software.'+upstream+'-x64.bin_completed')
    assert lockfile.exists
    assert lockfile.user == 'root'

@@ -23,6 +23,15 @@ def test_latest_is_downloaded(host):
    upstream_json = json.load(upstream_fd)
    upstream = upstream_json['version']

    installer = host.file('/opt/atlassian/tmp/jira-core.'+upstream+'.tar.gz')
    installer = host.file('/media/atl/downloads/jira-core.'+upstream+'.tar.gz')
    assert installer.exists
    assert installer.user == 'root'


def test_completed_lockfile(host):
    upstream_fd = urllib.request.urlopen("https://marketplace.atlassian.com/rest/2/applications/jira/versions/latest")
    upstream_json = json.load(upstream_fd)
    upstream = upstream_json['version']

    lockfile = host.file('/media/atl/downloads/jira-core.'+upstream+'.tar.gz_completed')
    assert lockfile.exists
    assert lockfile.user == 'root'

@@ -14,10 +14,15 @@ def test_version_is_correct(host):
    assert verfile.content.decode("UTF-8").strip() == "7.9.0"


def test_is_downloaded(host):
    installer = host.file('/opt/atlassian/tmp/jira-core.7.9.0-x64.bin')
    installer = host.file('/media/atl/downloads/jira-core.7.9.0-x64.bin')
    assert installer.exists
    assert installer.user == 'root'


def test_completed_lockfile(host):
    lockfile = host.file('/media/atl/downloads/jira-core.7.9.0-x64.bin_completed')
    assert lockfile.exists
    assert lockfile.user == 'root'


def test_is_unpacked(host):
    installer = host.file('/opt/atlassian/jira-core/7.9.0/atlassian-jira/')
    assert installer.exists

@@ -23,6 +23,15 @@ def test_latest_is_downloaded(host):
    upstream_json = json.load(upstream_fd)
    upstream = upstream_json['version']

    installer = host.file('/opt/atlassian/tmp/jira-core.'+upstream+'-x64.bin')
    installer = host.file('/media/atl/downloads/jira-core.'+upstream+'-x64.bin')
    assert installer.exists
    assert installer.user == 'root'


def test_completed_lockfile(host):
    upstream_fd = urllib.request.urlopen("https://marketplace.atlassian.com/rest/2/applications/jira/versions/latest")
    upstream_json = json.load(upstream_fd)
    upstream = upstream_json['version']

    lockfile = host.file('/media/atl/downloads/jira-core.'+upstream+'-x64.bin_completed')
    assert lockfile.exists
    assert lockfile.user == 'root'
@@ -14,10 +14,15 @@ def test_version_is_correct(host):
    assert verfile.content.decode("UTF-8").strip() == "7.13.2"


def test_is_downloaded(host):
    installer = host.file('/opt/atlassian/tmp/jira-core.7.13.2-x64.bin')
    installer = host.file('/media/atl/downloads/jira-core.7.13.2-x64.bin')
    assert installer.exists
    assert installer.user == 'root'


def test_completed_lockfile(host):
    lockfile = host.file('/media/atl/downloads/jira-core.7.13.2-x64.bin_completed')
    assert lockfile.exists
    assert lockfile.user == 'root'


def test_is_unpacked(host):
    installer = host.file('/opt/atlassian/jira-core/7.13.2')
    assert installer.exists

@@ -14,10 +14,15 @@ def test_version_is_correct(host):
    assert verfile.content.decode("UTF-8").strip() == "3.9.0"


def test_is_downloaded(host):
    installer = host.file('/opt/atlassian/tmp/servicedesk.3.9.0-x64.bin')
    installer = host.file('/media/atl/downloads/servicedesk.3.9.0-x64.bin')
    assert installer.exists
    assert installer.user == 'root'


def test_completed_lockfile(host):
    lockfile = host.file('/media/atl/downloads/servicedesk.3.9.0-x64.bin_completed')
    assert lockfile.exists
    assert lockfile.user == 'root'


def test_is_unpacked(host):
    installer = host.file('/opt/atlassian/jira-servicedesk/3.9.0')
    assert installer.exists

@@ -14,10 +14,15 @@ def test_version_is_correct(host):
    assert verfile.content.decode("UTF-8").strip() == "4.1.0"


def test_is_downloaded(host):
    installer = host.file('/opt/atlassian/tmp/servicedesk.4.1.0-x64.bin')
    installer = host.file('/media/atl/downloads/servicedesk.4.1.0-x64.bin')
    assert installer.exists
    assert installer.user == 'root'


def test_completed_lockfile(host):
    lockfile = host.file('/media/atl/downloads/servicedesk.4.1.0-x64.bin_completed')
    assert lockfile.exists
    assert lockfile.user == 'root'


def test_is_unpacked(host):
    installer = host.file('/opt/atlassian/jira-servicedesk/4.1.0')
    assert installer.exists

@@ -23,10 +23,15 @@ def test_version_is_correct(host):
    assert verfile.content.decode("UTF-8").strip() == sd


def test_is_downloaded(host):
    installer = host.file('/opt/atlassian/tmp/servicedesk.'+sd+'-x64.bin')
    installer = host.file('/media/atl/downloads/servicedesk.'+sd+'-x64.bin')
    assert installer.exists
    assert installer.user == 'root'


def test_completed_lockfile(host):
    lockfile = host.file('/media/atl/downloads/servicedesk.'+sd+'-x64.bin_completed')
    assert lockfile.exists
    assert lockfile.user == 'root'


def test_is_unpacked(host):
    installer = host.file('/opt/atlassian/jira-servicedesk/'+sd)
    assert installer.exists
roles/product_install/tasks/jira-all_extra_tasks.yml (new symbolic link, 1 line)
@@ -0,0 +1 @@
no_op.yml

roles/product_install/tasks/jira-servicedesk_as_obr.yml (new file, 174 lines)
@@ -0,0 +1,174 @@
---

- name: Get the installer product version info
  uri:
    url: "{{ atl_mpac_products }}/key/jira-software/versions/name/{{ atl_product_version }}"
    return_content: yes
  register: atl_product_version_info

- name: Show the returned build number
  debug:
    msg="buildNumber={{ atl_product_version_info.json.buildNumber }}"

- name: Get the JSD build version info
  uri:
    url: "{{ atl_mpac_products }}/key/jira-servicedesk/versions/latest?application=\
          jira&applicationBuild={{ atl_product_version_info.json.buildNumber }}"
    return_content: yes
  register: atl_jsd_build_info

- name: Show the returned obr binary href
  debug:
    msg="obr_ref={{ atl_jsd_build_info.json._embedded.artifact._links.binary.href }}"

- name: how about getting the obr filename
  debug:
    msg="obr_name=jira-servicedesk-application-{{ atl_jsd_build_info.json.name }}.obr"
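The two Marketplace lookups above chain a Jira build number into a compatible Service Desk OBR artifact. A minimal Python sketch of the same chain, illustration only and not part of this commit (the example Jira version is arbitrary):

import json
from six.moves import urllib

mpac_products = "https://marketplace.atlassian.com/rest/2/products"
jira_version = "7.13.1"  # example; atl_product_version in the role

# 1. Resolve the Jira release to its Marketplace build number.
product_info = json.load(urllib.request.urlopen(
    "%s/key/jira-software/versions/name/%s" % (mpac_products, jira_version)))
build_number = product_info["buildNumber"]

# 2. Find the latest Service Desk version compatible with that build.
jsd_info = json.load(urllib.request.urlopen(
    "%s/key/jira-servicedesk/versions/latest?application=jira&applicationBuild=%s"
    % (mpac_products, build_number)))

obr_href = jsd_info["_embedded"]["artifact"]["_links"]["binary"]["href"]
obr_name = "jira-servicedesk-application-%s.obr" % jsd_info["name"]
print(obr_href, obr_name)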
- name: is shared_home set ?
  debug:
    msg="atl_product_home_shared_download_dir={{ atl_product_home_shared_download_dir }}"

# For the first run a temp obr should be downloaded but moved to
# shared home to ensure all subsequent nodes have access
# to the same specific version binary.
# To prevent a race condition with multiple downloads at the same time
# a directory is used as a lockfile (atomic operation) when moving obr.
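The "directory as a lockfile" trick above relies on directory creation being atomic: only one node can create it, so only that node gets to move the artifact. A minimal Python sketch of the idea, illustration only (the lock path is made up):

import os

moving_lock = "/media/atl/downloads/example.obr_moving"  # illustrative lock path

try:
    os.mkdir(moving_lock)   # atomic: exactly one node succeeds
    i_own_the_move = True
except OSError:             # another node already holds the lock
    i_own_the_move = False

if i_own_the_move:
    # copy the artifact into shared home, create the *_completed lock dir,
    # then remove the moving lock -- mirroring the tasks below
    pass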
- name: Set assumptions to avoid race condition
  set_fact:
    download_obr: true
    move_obr: false
    atl_obr_download_href: "{{ atl_jsd_build_info.json._embedded.artifact._links.binary.href }}"
    atl_obr_filename: "jira-servicedesk-application-{{ atl_jsd_build_info.json.name }}.obr"
    atl_obr_download: "{{ atl_installer_temp }}/jira-servicedesk-application-{{ atl_jsd_build_info.json.name }}.obr"
    atl_obr_shared_download: "{{ atl_product_home_shared_download_dir }}/jira-servicedesk-application-{{ atl_jsd_build_info.json.name }}.obr"
    atl_obr_moving_lock: "{{ atl_product_home_shared_download_dir }}/jira-servicedesk-application-{{ atl_jsd_build_info.json.name }}.obr_moving"
    atl_obr_completed_lock: "{{ atl_product_home_shared_download_dir }}/jira-servicedesk-application-{{ atl_jsd_build_info.json.name }}.obr_completed"

# Check for pre-downloaded obr on shared_home and completed lock dir.
- name: Check for completed lock directory
  stat:
    path: "{{ atl_obr_completed_lock }}"
  register: completed_lock

- name: Check for obr in home_shared
  stat:
    path: "{{ atl_obr_shared_download }}"
  register: home_shared_download

# If obr exists and lockdir exists use this obr instead
- name: Check lock directory and obr exists on shared_home
  set_fact:
    download_obr: false
    atl_obr_download: "{{ atl_obr_shared_download }}"
  when:
    - home_shared_download.stat.exists
    - completed_lock.stat.isdir is defined
    - completed_lock.stat.isdir

# Fetch obr if required
- name: download_obr is true so fetch and do all the things
  block:

    # Fetch obr and copy to temp
    - name: Fetch obr
      get_url:
        url: "{{ atl_obr_download_href }}"
        dest: "{{ atl_obr_download }}"
        mode: 0755
        force: false
      register: atl_obr_completed

    # If obr was fetched make the lock directory
    - name: Create moving_lock.
      file:
        path: "{{ atl_obr_moving_lock }}"
        state: directory
      when:
        - atl_obr_completed is succeeded
      register: moving_lock_created

    # Directory lock was created by this run?
    # If so, then set a fact intending to move obr
    - name: Move obr Scenario - lock created by this run
      set_fact:
        move_obr: true
      when:
        - moving_lock_created is succeeded
        - moving_lock_created.changed
    # Otherwise the directory lock was either already created or
    # could not be created. The fallback is to continue and install from temp.

  when: download_obr

# If the intention is to move obr to home_shared
- name: Move obr to home_shared
  block:

    - name: Copy temp installer to home_shared
      copy:
        src: "{{ atl_obr_download }}"
        dest: "{{ atl_obr_shared_download }}"
        remote_src: true
      register: copied

    - name: Create completed_lock once obr downloaded and copied
      file:
        path: "{{ atl_obr_completed_lock }}"
        state: directory
      when: copied is succeeded
      register: completed_lock_created

    - name: Remove moving_lock to show that obr is completed
      file:
        path: "{{ atl_obr_moving_lock }}"
        state: absent
      when:
        - completed_lock_created is succeeded
        - copied is succeeded
      register: moving_lock_removed

    - name: Delete old temp installer
      file:
        path: "{{ atl_obr_download }}"
        state: absent
      when: moving_lock_removed is succeeded
      register: temp_deleted

    - name: Set install to home_shared location
      set_fact:
        atl_obr_download: "{{ atl_obr_shared_download }}"
      when: temp_deleted is succeeded

  when: move_obr

# At this point the binary is in {{ atl_obr_download }}
# (which is either on home_shared or temp)

- name: Ensure installed-plugins dir exists
  file:
    path: "{{ atl_product_home_shared }}/plugins/installed-plugins"
    state: directory
    mode: 0750
    owner: "{{ atl_product_user }}"
    group: "{{ atl_product_user }}"

# Note: as ansible unarchive can't handle "-j junk paths" we need to ignore errors to bypass the path verify
- name: Unpack the obr into the installed-plugins dir
  unarchive:
    remote_src: true
    src: "{{ atl_obr_download }}"
    dest: "{{ atl_product_home_shared }}/plugins/installed-plugins"
    creates: "{{ atl_product_home_shared }}/plugins/installed-plugins/jira-servicedesk-application-{{ atl_jsd_build_info.json.name }}.jar"
    list_files: no
    exclude:
      - M*
    owner: "{{ atl_product_user }}"
    group: "{{ atl_product_user }}"
    mode: 0750
  ignore_errors: yes

- name: Move JSD dependency jars into the installed-plugins dir
  shell:
    cmd: "mv {{ atl_product_home_shared }}/plugins/installed-plugins/dependencies/*.jar {{ atl_product_home_shared }}/plugins/installed-plugins"
    creates: "{{ atl_product_home_shared }}/plugins/installed-plugins/servicedesk-core-ui-plugin-{{ atl_jsd_build_info.json.name }}-*.jar"
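Because the obr is an archive that unarchive can unpack but unarchive has no equivalent of unzip's -j (junk paths), the tasks above extract it in place and then mv the dependencies/ jars up a level. A rough Python sketch of what "junking paths" would look like, illustration only (the file names are made up and the META-INF assumption comes from the `exclude: M*` pattern above):

import os
import zipfile

obr = "/media/atl/downloads/jira-servicedesk-application-3.16.1.obr"  # illustrative
dest = "/media/atl/jira/shared/plugins/installed-plugins"

with zipfile.ZipFile(obr) as zf:
    for member in zf.namelist():
        # skip metadata (e.g. META-INF, as excluded above) and directory entries
        if member.startswith("M") or member.endswith("/"):
            continue
        # "junk" the path: keep only the basename, flattening dependencies/ into dest
        target = os.path.join(dest, os.path.basename(member))
        with zf.open(member) as src, open(target, "wb") as out:
            out.write(src.read())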
@@ -116,6 +116,7 @@
    - "{{ atl_product_home }}"
    - "{{ atl_product_installation_versioned }}"
    - "{{ atl_product_version_cache_dir }}"
    - "{{ atl_product_home_shared_download_dir }}"
  changed_when: false  # For Molecule idempotence check

# At this point atl_product_version should be set, cache if necessary.
@@ -125,18 +126,120 @@
    dest: "{{ atl_product_version_cache }}"
    force: true

# For the first run a temp binary should be downloaded but moved to
# shared home to ensure all subsequent nodes have access
# to the same specific version binary.
# To prevent a race condition with multiple downloads at the same time
# a directory is used as a lockfile (atomic operation) when moving the binary.

# Note: We don't cache the binary in the shared drive due to the complexity
# around download race-conditions if multiple nodes are starting at
# the same time. Downloading from product-downloads.atlassian.com
# (which is a CDN) takes seconds anyway.
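Taken together, the tasks below implement a simple first-node/other-node protocol around the shared download directory. A compact Python sketch of the decision flow they encode, illustration only and not part of the role (the fetch callable stands in for the get_url step):

import os
import shutil


def resolve_installer(fetch, temp_download, shared_download, moving_lock, completed_lock):
    """fetch is a callable that downloads the installer to temp_download."""
    # Subsequent nodes: a completed lock plus a shared copy means reuse it.
    if os.path.isdir(completed_lock) and os.path.exists(shared_download):
        return shared_download

    fetch(temp_download)          # the get_url step in the role
    try:
        os.mkdir(moving_lock)     # atomic claim to publish the binary
    except OSError:
        return temp_download      # another node is publishing; install from temp

    shutil.copy(temp_download, shared_download)
    os.mkdir(completed_lock)      # mark the shared copy as usable
    os.rmdir(moving_lock)
    os.remove(temp_download)
    return shared_download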
- name: Fetch product installer
  get_url:
    url: "{{ atl_product_download_url }}"
    dest: "{{ atl_product_download }}"
    mode: 0755
    force: false
- name: Set assumptions to avoid race condition
  set_fact:
    download_binary: true
    move_binary: false
    atl_product_download: "{{ atl_product_temp_download }}"

# Check for pre-downloaded binary on shared_home and completed lock dir.
- name: Check for completed lock directory
  stat:
    path: "{{ atl_product_home_shared_completed_lock }}"
  register: completed_lock

- name: Check for product installer in home_shared
  stat:
    path: "{{ atl_product_home_shared_download }}"
  register: home_shared_download

# If binary exists and lockdir exists use this binary instead
- name: Check lock directory and binary exists on shared_home
  set_fact:
    download_binary: false
    atl_product_download: "{{ atl_product_home_shared_download }}"
  when:
    - home_shared_download.stat.exists
    - completed_lock.stat.isdir is defined
    - completed_lock.stat.isdir

# Fetch binary if required
- name: download_binary is true so fetch and do all the things
  block:

    # Fetch binary and copy to temp
    - name: Fetch binary
      get_url:
        url: "{{ atl_product_download_url }}"
        dest: "{{ atl_product_temp_download }}"
        mode: 0755
        force: false
      register: atl_product_completed

    # If product installer was fetched make the lock directory
    - name: Create moving_lock.
      file:
        path: "{{ atl_product_home_shared_moving_lock }}"
        state: directory
      when:
        - atl_product_completed is succeeded
      register: moving_lock_created

    # Directory lock was created by this run?
    # If so, then set a fact intending to move the binary
    - name: Move binary Scenario - lock created by this run
      set_fact:
        move_binary: true
      when:
        - moving_lock_created is succeeded
        - moving_lock_created.changed
    # Otherwise the directory lock was either already created or
    # could not be created. The fallback is to continue and install from temp.

  when: download_binary

# If the intention is to move the binary to home_shared
- name: Move product installer to home_shared
  block:

    - name: Copy temp installer to home_shared
      copy:
        src: "{{ atl_product_temp_download }}"
        dest: "{{ atl_product_home_shared_download }}"
        remote_src: true
      when:
        - moving_lock_created is succeeded
        - moving_lock_created.changed
      register: copied

    - name: Create completed_lock once product installer downloaded and copied
      file:
        path: "{{ atl_product_home_shared_completed_lock }}"
        state: directory
      when: copied is succeeded
      register: completed_lock_created

    - name: Remove moving_lock to show that binary is completed
      file:
        path: "{{ atl_product_home_shared_moving_lock }}"
        state: absent
      when:
        - completed_lock_created is succeeded
        - copied is succeeded
      register: moving_lock_removed

    - name: Delete old temp installer
      file:
        path: "{{ atl_product_temp_download }}"
        state: absent
      when: moving_lock_removed is succeeded
      register: temp_deleted

    - name: Set install to home_shared location
      set_fact:
        atl_product_download: "{{ atl_product_home_shared_download }}"
      when: temp_deleted is succeeded

  when: move_binary

# At this point the binary is in {{ atl_product_download }}
# (which is either on home_shared or temp)
- name: Unpack the downloaded application depending on format
  include_tasks: "unpack_{{ atl_download_format }}.yml"

@@ -146,3 +249,9 @@
    dest: "{{ atl_product_installation_current }}"
    state: link
    force: true

- name: Include if jsd is requested to be installed from OBR
  include_tasks: "jira-servicedesk_as_obr.yml"
  when:
    - atl_install_jsd_as_obr is defined
    - atl_install_jsd_as_obr
@@ -10,6 +10,8 @@
# actions. For example, if root and the 'jira' user exists then it
# will create 'jira1'; this potentially creates idempotency/upgrade
# issues down the line.
# The variable {{ atl_product_download }} will be on temp for first nodes and shared_home for
# subsequent nodes.
- name: Run the installer
  command: /bin/sh "{{ atl_product_download }}" -q -varfile "{{ atl_product_varfile }}"
  args:

@@ -14,5 +14,6 @@ atl_startup_exec_path: "{{ atl_product_installation_current }}/{{ atl_startup_sc
atl_startup_exec_options: ["-fg"]
atl_startup_systemd_params: []

atl_systemd_service_name: "{{ atl_product_edition }}.service"

atl_systemd_service_target: "multi-user.target"

@@ -15,4 +15,4 @@ ExecStart={{ atl_startup_exec_path }}{% for c in atl_startup_exec_options %} {{
Restart=on-failure

[Install]
WantedBy=multi-target.target
WantedBy={{ atl_systemd_service_target }}
roles/restore_backups/.yamllint (new file, 12 lines)
@@ -0,0 +1,12 @@
extends: default

rules:
  braces:
    max-spaces-inside: 1
    level: error
  brackets:
    max-spaces-inside: 1
    level: error
  line-length: disable
  truthy: disable
  trailing-spaces: false
roles/restore_backups/defaults/main.yml (new file, 4 lines)
@@ -0,0 +1,4 @@
---

atl_backup_home_restore_canary_filename: ".slingshot_home_restore"
atl_backup_home_restore_canary_path: "{{ atl_product_home_shared }}/{{ atl_backup_home_restore_canary_filename }}"

roles/restore_backups/molecule/default/Dockerfile.j2 (new file, 14 lines)
@@ -0,0 +1,14 @@
# Molecule managed

{% if item.registry is defined %}
FROM {{ item.registry.url }}/{{ item.image }}
{% else %}
FROM {{ item.image }}
{% endif %}

RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
    elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
    elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
    elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
    elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \
    elif [ $(command -v xbps-install) ]; then xbps-install -Syu && xbps-install -y python sudo bash ca-certificates && xbps-remove -O; fi
roles/restore_backups/molecule/default/molecule.yml (new file, 36 lines)
@@ -0,0 +1,36 @@
---
dependency:
  name: galaxy
driver:
  name: docker
lint:
  name: yamllint
platforms:
  - name: amazon_linux2
    image: amazonlinux:2
    groups:
      - aws_node_local
    ulimits:
      - nofile:262144:262144
  - name: ubuntu_lts
    image: ubuntu:bionic
    groups:
      - aws_node_local
    ulimits:
      - nofile:262144:262144
provisioner:
  name: ansible
  options:
    skip-tags: runtime_pkg
  lint:
    name: ansible-lint
    options:
      x: ["701"]
  inventory:
    links:
      group_vars: ../../../../group_vars/
verifier:
  name: testinfra
  lint:
    name: flake8
  enabled: false

roles/restore_backups/molecule/default/playbook.yml (new file, 10 lines)
@@ -0,0 +1,10 @@
---
- name: Converge
  hosts: all
  vars:
    atl_backup_manifest_url: ''
    atl_backup_home_restore_canary_path: '/tmp/canary.tmp'

  roles:
    # Should be no-op
    - role: restore_backups

roles/restore_backups/molecule/default/tests/test_default.py (new file, 10 lines)
@@ -0,0 +1,10 @@
import os

import testinfra.utils.ansible_runner

testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
    os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')


def test_no_canary_file(host):
    assert not host.file('atl_backup_home_restore_canary_path').exists

roles/restore_backups/molecule/restore/Dockerfile.j2 (new file, 14 lines)
@@ -0,0 +1,14 @@
# Molecule managed

{% if item.registry is defined %}
FROM {{ item.registry.url }}/{{ item.image }}
{% else %}
FROM {{ item.image }}
{% endif %}

RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
    elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
    elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
    elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
    elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \
    elif [ $(command -v xbps-install) ]; then xbps-install -Syu && xbps-install -y python sudo bash ca-certificates && xbps-remove -O; fi
roles/restore_backups/molecule/restore/molecule.yml (new file, 30 lines)
@@ -0,0 +1,30 @@
---
dependency:
  name: galaxy
driver:
  name: docker
lint:
  name: yamllint
platforms:
  - name: amazon_linux2
    image: amazonlinux:2
    groups:
      - aws_node_local
    ulimits:
      - nofile:262144:262144
provisioner:
  name: ansible
  options:
    skip-tags: runtime_pkg
  lint:
    name: ansible-lint
    options:
      x: ["701"]
  inventory:
    links:
      group_vars: ../../../../group_vars/
verifier:
  name: testinfra
  lint:
    name: flake8
  enabled: false

roles/restore_backups/molecule/restore/playbook.yml (new file, 75 lines)
@@ -0,0 +1,75 @@
---
- name: Converge
  hosts: all
  vars:
    atl_backup_home_dest: "{{ test_archive }}"
    atl_backup_home_restore_canary_path: '/tmp/canary.tmp'
    atl_backup_id: 'test-backup'
    atl_backup_manifest_url: 'fake_manifest'

    atl_product_edition: 'jira-software'
    atl_product_home_shared: '/media/atl/jira/shared'
    atl_product_user: 'jira'
    atl_product_user_uid: '2001'
    atl_product_version_cache: "{{ atl_product_home_shared }}/{{ atl_product_edition }}.version"

    test_archive: '/tmp/hello.tar.gz'
    test_archive_file: 'hello.txt'
    test_archive_source: '/tmp/hello'
    test_pre_step_prefix: '[PRE-TEST]'
    test_product_version_file: "/tmp/{{ atl_product_edition }}.version"

  pre_tasks:
    - name: "{{ test_pre_step_prefix }} Install tar"
      package:
        state: present
        name: tar

    - name: "{{ test_pre_step_prefix }} Install useradd and groupadd binaries"
      package:
        state: present
        name: shadow-utils

    - name: "{{ test_pre_step_prefix }} Create application group"
      group:
        name: "{{ atl_product_user }}"
        gid: "{{ atl_product_user_uid }}"

    - name: "{{ test_pre_step_prefix }} Create application user"
      user:
        name: "{{ atl_product_user }}"
        uid: "{{ atl_product_user_uid }}"
        group: "{{ atl_product_user }}"

    - block:
        - name: "{{ test_pre_step_prefix }} Create a directory for the shared home archive"
          file:
            path: "{{ test_archive_source }}"
            state: directory
            mode: 0755
        - name: "{{ test_pre_step_prefix }} Create a file in the shared home"
          lineinfile:
            create: yes
            line: 'Hello, world!'
            path: "{{ test_archive_source }}/{{ test_archive_file }}"
            mode: 0640
        - name: "{{ test_pre_step_prefix }} Create the version file in the shared home"
          lineinfile:
            create: yes
            line: '8.5'
            path: "{{ test_product_version_file }}"
            mode: 0640
        - name: "{{ test_pre_step_prefix }} Archive the shared home"
          archive:
            path:
              - "{{ test_archive_source }}"
              - "{{ test_product_version_file }}"
            dest: "{{ test_archive }}"
            owner: "{{ atl_product_user }}"

  tasks:
    - name: Install distro-specific restore support packages
      include_tasks: "../../tasks/{{ ansible_distribution|lower }}.yml"

    - name: Restore shared home
      include_tasks: "../../tasks/home_restore.yml"

roles/restore_backups/molecule/restore/tests/test_default.py (new file, 39 lines)
@@ -0,0 +1,39 @@
import os
import pytest

import testinfra.utils.ansible_runner

testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
    os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')


@pytest.mark.parametrize('exe', [
    '/usr/bin/pg_dump',
    '/usr/bin/pg_restore',
    '/usr/bin/psql'
])
def test_postgresql_amazon_linux_extras_exes(host, exe):
    assert host.file(exe).exists


def test_postgresql_version(host):
    pg_dump_version_output = host.check_output('pg_dump --version')
    assert '(PostgreSQL) 9.6' in pg_dump_version_output


@pytest.mark.parametrize('file', [
    '/media/atl/jira/shared',
    '/media/atl/jira/shared/hello',
    '/media/atl/jira/shared/hello/hello.txt'
])
def test_shared_home_owner(host, file):
    assert host.file(file).exists
    assert host.file(file).user == 'jira'
    assert host.file(file).group == 'jira'


def test_file_modes(host):
    assert host.file('/media/atl/jira/shared/hello').mode == 0o755
    assert host.file('/media/atl/jira/shared/hello/hello.txt').mode == 0o640


def test_version_file_owned_by_root(host):
    assert host.file('/media/atl/jira/shared/jira-software.version').exists
    assert host.file('/media/atl/jira/shared/jira-software.version').user == 'root'
    assert host.file('/media/atl/jira/shared/jira-software.version').group == 'root'

roles/restore_backups/tasks/amazon.yml (new file, 9 lines)
@@ -0,0 +1,9 @@
---

# Amazon Linux 2 supplies extra packages via a special command.
- name: Enable Postgresql from 'extras'
  command: amazon-linux-extras install -y "postgresql{{ postgres_version }}"
  args:
    creates: /usr/bin/psql
  environment:
    PYTHON: /bin/python

roles/restore_backups/tasks/home_restore.yml (new file, 44 lines)
@@ -0,0 +1,44 @@
---
- name: Check for the restore canary file
  stat:
    path: "{{ atl_backup_home_restore_canary_path }}"
  register: restore_canary

- block:
    - name: Create shared home if necessary
      file:
        path: "{{ atl_product_home_shared }}"
        state: directory
        mode: 0750
        owner: "{{ atl_product_user }}"
        group: "{{ atl_product_user }}"

    - name: Restore the shared-home backup
      unarchive:
        src: "{{ atl_backup_home_dest }}"
        remote_src: yes
        dest: "{{ atl_product_home_shared }}"

    - name: Set shared home owner and group to application user
      file:
        path: "{{ atl_product_home_shared }}"
        recurse: yes
        group: "{{ atl_product_user }}"
        owner: "{{ atl_product_user }}"
        state: directory

    - name: Set version file owner and group to root
      file:
        path: "{{ atl_product_version_cache }}"
        group: root
        owner: root
        state: file
      # Ignore the error in case there is no product version file in the backup
      ignore_errors: yes

    - name: Create restore-canary if necessary
      copy:
        dest: "{{ atl_backup_home_restore_canary_path }}"
        content: "{{ atl_backup_id }}"

  when: not restore_canary.stat.exists
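The canary file above is what makes the restore a one-shot operation: the whole block is skipped once a previous run has written the backup id into it. A minimal Python sketch of the same guard, illustration only (the path and id are taken from the role defaults and the Molecule restore scenario):

import os

canary_path = "/media/atl/jira/shared/.slingshot_home_restore"  # atl_backup_home_restore_canary_path
backup_id = "test-backup"                                       # atl_backup_id in the restore scenario

if not os.path.exists(canary_path):
    # ... unpack the shared-home archive and fix ownership here ...
    with open(canary_path, "w") as canary:
        canary.write(backup_id)  # later runs see the canary and skip the restore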
roles/restore_backups/tasks/main.yml (new file, 115 lines)
@@ -0,0 +1,115 @@
---

# This role will attempt to fetch and load the backup manifest from a
# remote S3 URL. On successful completion the contents of the JSON or YAML
# document will be in the var `atl_backup_manifest`.
#
# PREREQUISITES:
# * `atl_backup_manifest_url` points at the manifest.
# * The shared home filesystem is mounted if necessary (e.g. NFS/EFS).
# * The database has been created and the variable `db_created` is
#   registered with the result (i.e. `register: db_created`).
#
# NOTE: The actual DB/FS restore operations could potentially be split
# out into discrete roles, but currently that is not required.
#
# TODO: Support HTTPS with authentication. Deferred until after the
# initial testing release.

- block:

  - name: Ensure temp directory is present
    file:
      path: "{{ atl_installer_temp }}"
      state: directory
      mode: 0750
      owner: "{{ atl_product_user }}"
      group: "{{ atl_product_user }}"
    changed_when: false  # For Molecule idempotence check

  - name: Parse the manifest URL
    set_fact:
      atl_backup_manifest_url: "{{ atl_backup_manifest_url | urlsplit }}"

  - name: Extract manifest file information
    set_fact:
      atl_backup_manifest_bucket: "{{ atl_backup_manifest_url.hostname }}"
      atl_backup_manifest_path: "{{ atl_backup_manifest_url.path }}"
      atl_backup_manifest_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest_url.path | basename }}"
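The urlsplit filter above is what turns the s3:// manifest URL into a bucket and key for aws_s3, and basename picks the local filename. A Python equivalent for illustration only (the example URL is made up, and /opt/atlassian/tmp stands in for atl_installer_temp):

import os
from six.moves.urllib.parse import urlsplit

manifest_url = "s3://my-backup-bucket/backups/manifest.yaml"  # hypothetical example

parts = urlsplit(manifest_url)
bucket = parts.hostname   # "my-backup-bucket"          -> aws_s3 `bucket`
key = parts.path          # "/backups/manifest.yaml"    -> aws_s3 `object`
dest = os.path.join("/opt/atlassian/tmp", os.path.basename(parts.path))

print(bucket, key, dest)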
  - name: Fetch the manifest from S3
    aws_s3:
      mode: get
      overwrite: different
      bucket: "{{ atl_backup_manifest_bucket }}"
      object: "{{ atl_backup_manifest_path }}"
      dest: "{{ atl_backup_manifest_dest }}"
    when: atl_backup_manifest_url.scheme == 's3'

  - name: Load parameters from manifest
    include_vars:
      file: "{{ atl_backup_manifest_dest }}"
      name: atl_backup_manifest

  - name: Define the DB and home dump destinations
    set_fact:
      # FIXME: The manifest format is still undecided so the
      # following usages will need to be updated once it settles.
      atl_backup_id: "{{ atl_backup_manifest.name }}"
      atl_backup_db_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.artifacts.db.location.location | basename }}"
      atl_backup_home_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.artifacts.sharedHome.location.location | basename }}"

  # FIXME: Here we fetch the backups. However we may wish to stream
  # these directly from S3 to the target DB/FS to avoid requiring
  # disk-space for the intermediate files.
  - name: Fetch DB backup from S3
    aws_s3:
      mode: get
      overwrite: different
      bucket: "{{ atl_backup_manifest.artifacts.db.location.location | urlsplit('hostname') }}"
      object: "{{ atl_backup_manifest.artifacts.db.location.location | urlsplit('path') }}"
      # We save the backup as a .tar file so that the postgresql_db module uses pg_restore instead of psql to do the restore.
      # This can be removed when ansible 2.10 is released.
      dest: "{{ atl_backup_db_dest }}.tar"

  - name: Fetch Home backup from S3
    aws_s3:
      mode: get
      overwrite: different
      bucket: "{{ atl_backup_manifest.artifacts.sharedHome.location.location | urlsplit('hostname') }}"
      object: "{{ atl_backup_manifest.artifacts.sharedHome.location.location | urlsplit('path') }}"
      dest: "{{ atl_backup_home_dest }}"

  - name: Install distro-specific restore support packages
    include_tasks: "{{ ansible_distribution|lower }}.yml"

  - name: Restore application database
    postgresql_db:
      login_host: "{{ atl_db_host }}"
      login_user: "{{ atl_db_root_user }}"
      login_password: "{{ atl_db_root_password }}"
      port: "{{ atl_db_port }}"
      name: "{{ atl_jdbc_db_name }}"
      owner: "{{ atl_jdbc_user }}"
      encoding: "{{ atl_jdbc_encoding }}"
      lc_collate: "{{ atl_jdbc_collation }}"
      lc_ctype: "{{ atl_jdbc_ctype }}"
      template: "{{ atl_jdbc_template }}"
      # Depends on fetch_backup roles
      state: restore
      target: "{{ atl_backup_db_dest }}.tar"
      target_opts: "-Fc"
    register: result
    # Managed DBs in cloud providers do not allow full root access to the DB engine, so we can safely ignore the COMMENT ON EXTENSION error.
    failed_when:
      - result.rc != 0
      - '"COMMENT ON EXTENSION" not in result.msg'
    when: db_created.changed and atl_backup_db_dest is defined

  - name: Restore shared home
    include_tasks: "home_restore.yml"

  when: atl_restore_required