Merged master into ITPLT-1857-stash-related-changes

Lee Goolsbee
2022-09-26 14:10:58 +00:00
105 changed files with 1485 additions and 1453 deletions

.ansible-lint Normal file

@@ -0,0 +1,11 @@
---
kinds:
- playbook: "./aws_*.yml"
offline: true
skip_list:
- ignore-errors
- meta-no-info
- name[casing] # so many of our task names aren't capitalized
- name[missing] # some tasks have no name
- risky-file-permissions
- yaml # many, many warnings


@@ -0,0 +1,4 @@
---
dependency:
enabled: false
prerun: false

.gitignore vendored

@@ -8,3 +8,4 @@ __pycache__
.envrc
.idea
.vscode
ansible_collections


@@ -1,8 +1,9 @@
## Prerequisites
You should have the following software installed:
* Python; 3.x by preference, but 2.7 works.
* You may also need the Python development packages depending on how its installed.
* Python; 3.8 or newer
* You may also need the Python development packages depending on how it's installed
* Note that the runtime still requires Python 2 for certain tasks on Amazon Linux 2, but it is not necessary for local development
* Python Virtualenv
* Docker
* Cloudtoken
@@ -17,23 +18,26 @@ All other requirements will be installed under Virtualenv.
### Step 1.2: Install development environment dependencies
To ensure compatibility we specify a specific Ansible version; currently 2.7.11
(some older versions had issues with RDS). We do this with
[Pipenv](https://docs.pipenv.org/) to lock the dependency tree. There are 2 main
ways to do this; either directly if packaged, or via pip...
To ensure compatibility we pin the Ansible version; currently
ansible-core 2.13.x. We do this with [Pipenv](https://docs.pipenv.org/) to lock
the dependency tree. There are two main ways to install it: either directly if
packaged, or via pip...
# Ubuntu 19.04+, Debian 10+
sudo apt-get install pipenv python-dev
# Ubuntu 22.04+, Debian 11+
sudo apt-get install python3-dev python3-pip
# Older versions & RHEL/Amazon Linux, etc.
sudo apt-get install -y python-pip python-dev
# Or...
sudo yum install -y python-pip python-dev
pip install pipenv
# Amazon Linux 2
sudo amazon-linux-extras enable python3.8
sudo yum install python38 python38-pip python38-devel python-lxml
# Mac via Homebrew
brew install pipenv
brew install libpq openssl@3 python@X.x # (where "X.x" is 3.8 or newer)
export PATH="/opt/homebrew/opt/libpq/bin:$PATH"
export LDFLAGS="-L/opt/homebrew/opt/openssl@3/lib"
export CPPFLAGS="-I/opt/homebrew/opt/openssl@3/include"
# Finally
pip3 install pipenv
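For reference, the pins themselves come from the Pipfile updated later in this commit; the key entries are:
ansible-core = "==2.13.3"
boto3 = "==1.24.68"
botocore = "==1.27.68"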
### Step 1.3: Enter the development environment
@@ -44,11 +48,21 @@ development environment:
pipenv sync --dev
pipenv shell --dev
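A quick sanity check (not part of the original steps) to confirm the pinned toolchain is active inside the environment:
pipenv run ansible --version
pipenv run molecule --version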
### Step 1.4: Run some tests against a role
### Step 1.4: Install Ansible collections
[Molecule](https://molecule.readthedocs.io/en/stable/) is a testing framework for Ansible. We use this to test the
functionality of individual and groups of roles, and to ensure cross-platform
compatibility (currently Amazon Linux 2 and Ubuntu LTS).
To save a little time during deployment, we rely directly on ansible-core and a
custom set of collections as opposed to installing the community edition. To that
end, when testing locally, you'll need these collections installed where Ansible
expects them to be; that path is configured in ansible.cfg and used automatically
when collections are installed via `ansible-galaxy`:
ansible-galaxy collection install --upgrade --verbose --requirements-file requirements.yml
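The path that makes this work is set in ansible.cfg (the relevant excerpt, as changed elsewhere in this commit):
[defaults]
collections_paths = ./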
### Step 1.5: Run some tests against a role
[Molecule](https://molecule.readthedocs.io/en/stable/) is a testing framework for
Ansible. We use it to test the functionality of individual roles and groups of roles,
and to ensure cross-platform compatibility (currently Amazon Linux 2 and Ubuntu LTS).
We're going to check that the role that downloads the products works for both
Jira Core and Confluence, on both supported Linux distributions. So run the
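A minimal sketch of such a run, assuming the scenario names match those used in the pipeline configuration later in this change (e.g. roles/product_install with the jira_software_latest and confluence_latest scenarios):
cd roles/product_install
pipenv run molecule test -s jira_software_latest
pipenv run molecule test -s confluence_latest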

Pipfile

@@ -4,17 +4,17 @@ verify_ssl = true
name = "pypi"
[packages]
ansible = "==2.10.7"
boto3 = "==1.17.49"
botocore = "==1.20.49"
ansible-core = "==2.13.3"
boto3 = "==1.24.68"
botocore = "==1.27.68"
psycopg2-binary = "==2.9.3"
[dev-packages]
molecule = "==3.2.2"
molecule-docker = "==0.2.4"
docker = "==4.4.1"
taskcat = "*"
molecule = "==4.0.1"
molecule-docker = "==2.0.0"
docker = "==6.0.0"
pytest = "*"
testinfra = "*"
pytest-testinfra = "*"
[requires]
python_version = "3.7"
python_version = "3"

Pipfile.lock generated

File diff suppressed because it is too large


@@ -1,5 +1,4 @@
[defaults]
retry_files_enabled = False
callback_whitelist = profile_tasks
conditional_bare_variables = True
callbacks_enabled = profile_tasks
collections_paths = ./


@@ -34,7 +34,7 @@
- role: aws_common
# For Bitbucket DC clusters that store repos on Bitbucket Mesh (https://confluence.atlassian.com/bitbucketserver/bitbucket-data-center-and-server-8-0-release-notes-1115659343.html#BitbucketDataCenterandServer8.0releasenotes-mesh),
# nodes may be set up to use EFS instead of NFS for shared_home by not defining 'atl_fileserver_host'
- { role: aws_shared_fs_config, when: (atl_fileserver_host is not defined or atl_fileserver_host |length == 0) and (atl_efs_id|length > 0) }
- { role: aws_shared_fs_config, when: (atl_fileserver_host is not defined or atl_fileserver_host | length == 0) and (atl_efs_id | length > 0) }
- { role: nfs_mount, when : (atl_fileserver_host is defined) and (atl_fileserver_host|length > 0) }
- role: product_common
- role: product_install


@@ -4,13 +4,14 @@ set -e
source /etc/os-release
if [[ $ID = "amzn" ]]; then
amazon-linux-extras enable python3.8
yum clean metadata
yum install -y \
python3-devel \
python3-pip \
python2-boto3 \
python2-botocore \
python2-pip \
python38 \
python38-pip \
python38-devel \
python-lxml
else
# FIXME: Currently assumes Debian-based
apt-get update && \
@@ -32,4 +33,8 @@ if [[ $1 == "--dev" ]]; then
fi
echo "Installing collections from galaxy..."
pipenv run ansible-galaxy collection install -v -r requirements.yml
galaxy_retry_count=0
until [[ $galaxy_retry_count -gt 2 ]]; do
pipenv run ansible-galaxy collection install --upgrade --verbose --requirements-file requirements.yml && break
galaxy_retry_count=$((galaxy_retry_count + 1))
done


@@ -4,11 +4,13 @@
#
# make > ../bitbucket-pipelines.yml
image: debian:buster
image: debian:bullseye
options:
size: 2x
definitions:
caches:
ansible-collections: ansible_collections
services:
docker:
memory: 4096
@@ -34,178 +36,258 @@ pipelines:
- parallel:
- step:
name: aws_common/cw-disabled
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/aws_common
- pipenv run molecule test -s cw-disabled
name: aws_common/cw-disabled
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/aws_common
- pipenv run molecule test -s cw-disabled
- step:
name: aws_common/default
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/aws_common
- pipenv run molecule test -s default
name: aws_common/default
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/aws_common
- pipenv run molecule test -s default
- step:
name: aws_common/logs-disabled
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/aws_common
- pipenv run molecule test -s logs-disabled
name: aws_common/logs-disabled
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/aws_common
- pipenv run molecule test -s logs-disabled
- step:
name: bitbucket_config/default
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/bitbucket_config
- pipenv run molecule test -s default
name: bitbucket_config/default
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/bitbucket_config
- pipenv run molecule test -s default
- step:
name: bitbucket_config/iam_elasticsearch
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/bitbucket_config
- pipenv run molecule test -s iam_elasticsearch
name: bitbucket_config/iam_elasticsearch
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/bitbucket_config
- pipenv run molecule test -s iam_elasticsearch
- step:
name: confluence_config/aurora
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/confluence_config
- pipenv run molecule test -s aurora
name: confluence_config/aurora
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/confluence_config
- pipenv run molecule test -s aurora
- step:
name: confluence_config/default
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/confluence_config
- pipenv run molecule test -s default
name: confluence_config/default
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/confluence_config
- pipenv run molecule test -s default
- step:
name: confluence_config/password_char_escaping
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/confluence_config
- pipenv run molecule test -s password_char_escaping
name: confluence_config/password_char_escaping
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/confluence_config
- pipenv run molecule test -s password_char_escaping
- step:
name: confluence_config/system_jdk
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/confluence_config
- pipenv run molecule test -s system_jdk
name: confluence_config/system_jdk
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/confluence_config
- pipenv run molecule test -s system_jdk
- step:
name: diy_backup/default
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/diy_backup
- pipenv run molecule test -s default
name: diy_backup/default
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/diy_backup
- pipenv run molecule test -s default
- step:
name: jira_config/aurora
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/jira_config
- pipenv run molecule test -s aurora
name: jira_config/aurora
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/jira_config
- pipenv run molecule test -s aurora
- step:
name: jira_config/default
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/jira_config
- pipenv run molecule test -s default
name: jira_config/default
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/jira_config
- pipenv run molecule test -s default
- step:
name: jira_config/jira_config_props
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/jira_config
- pipenv run molecule test -s jira_config_props
name: jira_config/jira_config_props
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/jira_config
- pipenv run molecule test -s jira_config_props
- step:
name: jira_config/password_char_escaping
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/jira_config
- pipenv run molecule test -s password_char_escaping
name: jira_config/password_char_escaping
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/jira_config
- pipenv run molecule test -s password_char_escaping
- step:
name: linux_common/default
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/linux_common
- pipenv run molecule test -s default
name: linux_common/default
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/linux_common
- pipenv run molecule test -s default
- step:
name: product_common/default
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_common
- pipenv run molecule test -s default
name: product_common/default
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_common
- pipenv run molecule test -s default
- step:
name: product_common/system_jdk
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_common
- pipenv run molecule test -s system_jdk
name: product_common/system_jdk
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_common
- pipenv run molecule test -s system_jdk
- step:
name: product_install/bitbucket_latest
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s bitbucket_latest
name: product_install/bitbucket_latest
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s bitbucket_latest
- step:
name: product_install/confluence_latest
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s confluence_latest
name: product_install/confluence_latest
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s confluence_latest
- step:
name: product_install/confluence_version_with_uppercase
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
@@ -214,175 +296,252 @@ pipelines:
- cd roles/product_install
- pipenv run molecule test -s confluence_version_with_uppercase
- step:
name: product_install/crowd_latest
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s crowd_latest
name: product_install/crowd_latest
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s crowd_latest
- step:
name: product_install/default
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s default
name: product_install/default
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s default
- step:
name: product_install/jira_all
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_all
name: product_install/jira_all
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_all
- step:
name: product_install/jira_tarball_download_url
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_tarball_download_url
name: product_install/jira_cached_with_downgrade
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_cached_with_downgrade
- step:
name: product_install/jira_cached_with_downgrade
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_cached_with_downgrade
name: product_install/jira_cached_with_upgrade
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_cached_with_upgrade
- step:
name: product_install/jira_cached_with_upgrade
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_cached_with_upgrade
name: product_install/jira_software_latest
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_software_latest
- step:
name: product_install/jira_software_latest
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_software_latest
name: product_install/jira_tarball
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_tarball
- step:
name: product_install/jira_tarball
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_tarball
name: product_install/jira_tarball_download_url
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_tarball_download_url
- step:
name: product_install/jira_version_from_file
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_version_from_file
name: product_install/jira_version_from_file
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_version_from_file
- step:
name: product_install/jira_version_latest
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_version_latest
name: product_install/jira_version_latest
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_version_latest
- step:
name: product_install/jira_version_override
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_version_override
name: product_install/jira_version_override
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_version_override
- step:
name: product_install/servicedesk3
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s servicedesk3
name: product_install/servicedesk3
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s servicedesk3
- step:
name: product_install/servicedesk4
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s servicedesk4
name: product_install/servicedesk4
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s servicedesk4
- step:
name: product_install/servicedesk_latest
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s servicedesk_latest
name: product_install/servicedesk_latest
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s servicedesk_latest
- step:
name: product_startup/bitbucket
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_startup
- pipenv run molecule test -s bitbucket
name: product_startup/bitbucket
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_startup
- pipenv run molecule test -s bitbucket
- step:
name: product_startup/default
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_startup
- pipenv run molecule test -s default
name: product_startup/default
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_startup
- pipenv run molecule test -s default
- step:
name: product_startup/startup_restart_false
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_startup
- pipenv run molecule test -s startup_restart_false
name: product_startup/startup_restart_false
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_startup
- pipenv run molecule test -s startup_restart_false
- step:
name: product_startup/synchrony
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_startup
- pipenv run molecule test -s synchrony
name: product_startup/synchrony
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_startup
- pipenv run molecule test -s synchrony
- step:
name: Run Snyk security scan
services:
- docker
script:
- ./bin/install-ansible --dev
- apt-get update && apt-get install -y npm
- npm install -g snyk@1.455.0
- snyk auth $SNYK_TOKEN
- pipenv run snyk monitor --severity-threshold=high --project-name=dc-deployments-automation
name: Run Snyk security scan
caches:
- docker
- pip
- node
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- apt-get update && apt-get install -y npm
- npm install -g snyk
- snyk auth $SNYK_TOKEN
- pipenv run snyk monitor --severity-threshold=high --project-name=dc-deployments-automation


@@ -38,7 +38,7 @@ atl_installer_temp: "{{ atl_installation_base }}/tmp"
# installed and linked to `/usr/lib/jvm/java`.
# See product_common/task/ubuntu.yml for an example of appropriate
# linking using `alternatives`.
atl_java_home: "{{ '/usr/lib/jvm/java' if atl_use_system_jdk else (atl_product_installation_current + '/jre') }}"
atl_java_home: "{{ '/usr/lib/jvm/java' if atl_use_system_jdk | bool else (atl_product_installation_current + '/jre') }}"
atl_java_binary: "{{ atl_java_home }}/bin/java"
atl_product_logs_default: &logs_default
@@ -79,8 +79,8 @@ atl_aws_region: "{{ lookup('env', 'ATL_AWS_REGION') }}"
atl_aws_iam_role: "{{ lookup('env', 'ATL_AWS_IAM_ROLE') }}"
atl_aws_iam_role_arn: "{{ lookup('env', 'ATL_AWS_IAM_ROLE_ARN') }}"
atl_aws_enable_cloudwatch: "{{ lookup('env', 'ATL_AWS_ENABLE_CLOUDWATCH')|bool or false }}"
atl_aws_enable_cloudwatch_logs: "{{ lookup('env', 'ATL_AWS_ENABLE_CLOUDWATCH_LOGS')|bool or false }}"
atl_aws_enable_cloudwatch: "{{ lookup('env', 'ATL_AWS_ENABLE_CLOUDWATCH') | bool or false }}"
atl_aws_enable_cloudwatch_logs: "{{ lookup('env', 'ATL_AWS_ENABLE_CLOUDWATCH_LOGS') | bool or false }}"
atl_db_engine: "{{ lookup('env', 'ATL_DB_ENGINE') }}"
atl_db_host: "{{ lookup('env', 'ATL_DB_HOST') }}"
@@ -100,7 +100,7 @@ atl_db_timebetweenevictionrunsmillis: "{{ lookup('env', 'ATL_DB_TIMEBETWEENEVICT
atl_db_minevictableidletimemillis: "{{ lookup('env', 'ATL_DB_MINEVICTABLEIDLETIMEMILLIS') or '5000' }}"
atl_db_removeabandoned: "{{ lookup('env', 'ATL_DB_REMOVEABANDONED') or 'true' }}"
atl_db_removeabandonedtimeout: "{{ lookup('env', 'ATL_DB_REMOVEABANDONEDTIMEOUT') or '300' }}"
atl_db_testwhileidle: "{{ lookup('env', 'ATL_DB_TESTWHILEIDLE') or 'true'}}"
atl_db_testwhileidle: "{{ lookup('env', 'ATL_DB_TESTWHILEIDLE') or 'true' }}"
atl_db_testonborrow: "{{ lookup('env', 'ATL_DB_TESTONBORROW') or 'false' }}"
atl_db_engine_to_db_type_map:
aurora_postgres: "postgresaurora96"
@@ -116,7 +116,7 @@ atl_jdbc_ctype: "{{ lookup('env', 'ATL_JDBC_CTYPE') or 'en_US.UTF-8' }}"
atl_jdbc_template: "{{ lookup('env', 'ATL_JDBC_TEMPLATE') or 'template1' }}"
atl_jdbc_query_params_for_engine:
aurora_postgres: "?targetServerType=master"
atl_jdbc_url: "jdbc:postgresql://{{ atl_db_host }}:{{ atl_db_port }}/{{ atl_jdbc_db_name }}{{ atl_jdbc_query_params_for_engine[atl_db_engine]| default('') }}"
atl_jdbc_url: "jdbc:postgresql://{{ atl_db_host }}:{{ atl_db_port }}/{{ atl_jdbc_db_name }}{{ atl_jdbc_query_params_for_engine[atl_db_engine] | default('') }}"
atl_jvm_heap: "{{ lookup('env', 'ATL_JVM_HEAP') or '2048m' }}"
atl_jvm_opts: "{{ lookup('env', 'ATL_JVM_OPTS') or '' }}"


@@ -4,11 +4,13 @@
#
# make > ../bitbucket-pipelines.yml
image: debian:buster
image: debian:bullseye
options:
size: 2x
definitions:
caches:
ansible-collections: ansible_collections
services:
docker:
memory: 4096
@@ -35,23 +37,32 @@ pipelines:
- parallel:
{% for spath in scenario_paths %}
- step:
name: {{ spath.parts[2] }}/{{ spath.parts[4] }}
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/{{ spath.parts[2] }}
- pipenv run molecule test -s {{ spath.parts[4] }}
name: {{ spath.parts[2] }}/{{ spath.parts[4] }}
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/{{ spath.parts[2] }}
- pipenv run molecule test -s {{ spath.parts[4] }}
{% endfor %}
- step:
name: Run Snyk security scan
services:
- docker
script:
- ./bin/install-ansible --dev
- apt-get update && apt-get install -y npm
- npm install -g snyk
- snyk auth $SNYK_TOKEN
- pipenv run snyk monitor --severity-threshold=high --project-name=dc-deployments-automation
name: Run Snyk security scan
caches:
- docker
- pip
- node
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- apt-get update && apt-get install -y npm
- npm install -g snyk
- snyk auth $SNYK_TOKEN
- pipenv run snyk monitor --severity-threshold=high --project-name=dc-deployments-automation


@@ -1,4 +1,12 @@
---
collections:
- name: amazon.aws
version: 3.0.0
version: "4.1.0"
- name: ansible.posix
version: "1.4.0"
- name: community.docker
version: "3.0.2"
- name: community.general
version: "5.5.0"
- name: community.postgresql
version: "2.2.0"


@@ -1,15 +1,15 @@
---
- name: Enable CloudWatch Agent
systemd:
ansible.builtin.systemd:
name: "amazon-cloudwatch-agent.service"
daemon_reload: true
enabled: true
when: atl_aws_agent_restart
when: atl_aws_agent_restart | bool
- name: Restart CloudWatch Agent
systemd:
ansible.builtin.systemd:
name: "amazon-cloudwatch-agent.service"
enabled: true
state: restarted
when: atl_aws_agent_restart
when: atl_aws_agent_restart | bool


@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -12,6 +10,8 @@ platforms:
# image: ubuntu:bionic
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
inventory:
links:
group_vars: ../../../../group_vars/


@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -12,6 +10,8 @@ platforms:
# image: ubuntu:bionic
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
inventory:
links:
group_vars: ../../../../group_vars/


@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -12,6 +10,8 @@ platforms:
# image: ubuntu:bionic
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
inventory:
links:
group_vars: ../../../../group_vars/


@@ -1,18 +1,22 @@
---
- name: Install AWS support packages
yum:
ansible.builtin.yum:
name:
- amazon-efs-utils
- amazon-ssm-agent
- awscli
- git
- ec2-utils
vars:
ansible_python_interpreter: /usr/bin/python2
- name: Install CloudWatch Agent
yum:
ansible.builtin.yum:
name:
- "{{ aws_cloudwatch_agent_rpm }}"
when: atl_aws_enable_cloudwatch is defined and atl_aws_enable_cloudwatch
when: atl_aws_enable_cloudwatch is defined and atl_aws_enable_cloudwatch | bool
notify:
- Enable CloudWatch Agent
vars:
ansible_python_interpreter: /usr/bin/python2


@@ -1,31 +1,31 @@
---
- name: Fetch local EC2 metadata
ec2_metadata_facts:
amazon.aws.ec2_metadata_facts:
tags:
- notest
- name: Install distro-specific prerequisites
include_tasks: "{{ ansible_distribution|lower }}.yml"
ansible.builtin.include_tasks: "{{ ansible_distribution|lower }}.yml"
- name: Use EC2 instance ID for cluster node ID
set_fact:
ansible.builtin.set_fact:
atl_cluster_node_id: "{{ ansible_ec2_instance_id }}"
atl_local_ipv4: "{{ ansible_ec2_local_ipv4 | default(ansible_default_ipv4.address) }}"
- name: Generate CloudWatch config
template:
ansible.builtin.template:
src: "amazon-cloudwatch-agent.json.j2"
dest: "/opt/aws/amazon-cloudwatch-agent/etc/amazon-cloudwatch-agent.json"
owner: root
group: root
mode: 0644
when: atl_aws_enable_cloudwatch is defined and atl_aws_enable_cloudwatch
when: atl_aws_enable_cloudwatch is defined and atl_aws_enable_cloudwatch | bool
notify:
- Restart CloudWatch Agent
- name: Store some metadata about this run
include_tasks: "write-tags.yml"
ansible.builtin.include_tasks: "write-tags.yml"
- name: Initiate the startup of any new AWS services now
meta: flush_handlers
ansible.builtin.meta: flush_handlers


@@ -1,28 +1,27 @@
---
- name: Retrieve all available EC2 tags
ec2_tag:
amazon.aws.ec2_tag_info:
region: "{{ ansible_ec2_placement_region }}"
resource: "{{ ansible_ec2_instance_id }}"
state: list
register: ec2_instance_tags
ignore_errors: true
tags:
- notest
- name: Retrieve autoscaling group
set_fact:
ansible.builtin.set_fact:
ec2_autoscaling_group: "{{ ec2_instance_tags.tags['aws:autoscaling:groupName'] | default('') }}"
- block:
# No existing timestamp, so this is a first run. Persist some metadata into the ASG.
- name: Fetch the git revision for this repo
command:
- name: Fetch the git revision for this repo # noqa: command-instead-of-module no-changed-when
ansible.builtin.command:
cmd: git rev-parse HEAD
register: git_out
- name: Setup the new ASG tags
set_fact:
ansible.builtin.set_fact:
deployment_firstrun_meta:
- ResourceType: "auto-scaling-group"
ResourceId: "{{ ec2_autoscaling_group }}"
@@ -40,15 +39,15 @@
# Set the tags on the ASG and the local instance. We need to
# ignore errors as it's possible we don't have the permissions,
# and we can't check up-front.
- name: Set the first-run tags on the ASG ("FAIL" is not critical)
command: "aws autoscaling
- name: Set the first-run tags on the ASG ("FAIL" is not critical) # noqa: no-changed-when
ansible.builtin.command: "aws autoscaling
create-or-update-tags
--region {{ ansible_ec2_placement_region }}
--tags '{{ deployment_firstrun_meta | to_json }}'"
ignore_errors: true
- name: Set the tags on the local instance ("FAIL" is not critical)
ec2_tag:
amazon.aws.ec2_tag:
region: "{{ ansible_ec2_placement_region }}"
resource: "{{ ansible_ec2_instance_id }}"
tags:


@@ -4,7 +4,7 @@
"run_as_user": "root"
},
{% if atl_aws_enable_cloudwatch_logs is defined and atl_aws_enable_cloudwatch_logs %}
{% if atl_aws_enable_cloudwatch_logs is defined and atl_aws_enable_cloudwatch_logs | bool %}
"logs": {
"logs_collected": {
"files": {


@@ -1,3 +1,3 @@
---
dependencies:
- aws_common
- role: aws_common


@@ -1,13 +1,13 @@
---
- name: Create mountpoint
file:
ansible.builtin.file:
state: directory
path: "{{ atl_shared_mountpoint }}"
mode: 0755
- name: Enable mountpoint in fstab
mount:
ansible.posix.mount:
path: "{{ atl_shared_mountpoint }}"
src: "{{ efs_target }}:{{ efs_src_dir }}"
fstype: "{{ efs_type }}"


@@ -1,4 +1,7 @@
---
galaxy_info:
namespace: dc_deployments_automation
dependencies:
- bitbucket_common
- role: bitbucket_common


@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -18,6 +16,8 @@ platforms:
- nofile:262144:262144
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:


@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -18,6 +16,8 @@ platforms:
- nofile:262144:262144
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:


@@ -1,7 +1,7 @@
---
- name: Create Bitbucket shared dir if necessary
file:
ansible.builtin.file:
path: "{{ atl_product_home_shared }}"
owner: "{{ atl_product_user }}"
group: "{{ atl_product_user }}"
@@ -9,14 +9,14 @@
state: directory
- name: Create Bitbucket config file
template:
ansible.builtin.template:
src: bitbucket.properties.j2
dest: "{{ atl_product_home_shared }}/bitbucket.properties"
owner: "{{ atl_product_user }}"
group: "{{ atl_product_user }}"
- name: Remove write permissions from installation directory
file:
ansible.builtin.file:
path: "{{ atl_product_installation_versioned }}"
owner: "root"
group: "root"
@@ -25,7 +25,7 @@
changed_when: false # For Molecule idempotence check
- name: Create conf/Catalina directory so catalina.out logging works
file:
ansible.builtin.file:
path: "{{ atl_product_installation_versioned }}/conf/Catalina"
state: directory
mode: "u=rwX,g=rX,o-rwx"


@@ -13,7 +13,7 @@ hazelcast.network.aws.tag.value={{ atl_aws_stack_name }}
hazelcast.group.name={{ atl_aws_stack_name }}
hazelcast.group.password={{ atl_aws_stack_name }}
plugin.search.elasticsearch.baseurl={{ atl_elasticsearch_endpoint }}
{% if elasticsearch_should_auth_with_iam %}
{% if elasticsearch_should_auth_with_iam | bool %}
plugin.search.elasticsearch.aws.region={{ atl_aws_region }}
{% else %}
plugin.search.elasticsearch.username={{ atl_elasticsearch_username }}


@@ -1,4 +1,4 @@
---
dependencies:
- bitbucket_common
- role: bitbucket_common


@@ -1,24 +1,24 @@
---
- name: Force all notified handlers to run at this point, not waiting for normal sync points
meta: flush_handlers
ansible.builtin.meta: flush_handlers
- name: wait for port 7990 to be up
wait_for:
ansible.builtin.wait_for:
port: 7990
delay: 60
- name: wait for path to become available
wait_for:
ansible.builtin.wait_for:
path: "{{ atl_product_home_shared }}/data/migration/import"
delay: 60
- name: Copy Bitbucket dataset from s3
get_url:
ansible.builtin.get_url:
url: "{{ atl_bitbucket_dataset_url }}"
dest: "{{ atl_product_home_shared }}/data/migration/import"
- name: Invoke Import API
uri:
ansible.builtin.uri:
url: "{{ atl_bitbucket_baseurl }}/rest/api/1.0/migration/imports"
user: admin
password: "{{ atl_bitbucket_admin_password }}"
@@ -36,7 +36,7 @@
failed_when: output is defined and output.json is defined and output.json.state != 'INITIALISING'
- name: get import status
uri:
ansible.builtin.uri:
url: "{{ atl_bitbucket_baseurl }}/rest/api/1.0/migration/imports/{{ output.json.id }}"
user: admin
password: "{{ atl_bitbucket_admin_password }}"
@@ -50,7 +50,7 @@
delay: 10
- name: create lock file
file:
ansible.builtin.file:
path: "{{ atl_product_home_shared }}/data/migration/import/lock.file"
state: touch
when: import_status.json.state == 'COMPLETED'


@@ -33,7 +33,7 @@ atl_catalina_opts_extra: >-
-Dsynchrony.proxy.enabled=false
-Dconfluence.cluster.node.name={{ atl_local_ipv4 }}
-Dconfluence.cluster.hazelcast.max.no.heartbeat.seconds=60
{% if atl_synchrony_service_url|string|length %}-Dsynchrony.service.url={{ atl_synchrony_service_url }}{% endif %}
{% if atl_synchrony_service_url | string | length %}-Dsynchrony.service.url={{ atl_synchrony_service_url }}{% endif %}
atl_tomcat_port: "8080"
atl_tomcat_mgmt_port: "8005"


@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -14,6 +12,8 @@ platforms:
- aws_node_local
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:


@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -14,6 +12,8 @@ platforms:
- aws_node_local
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:


@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
@@ -15,6 +13,8 @@ platforms:
- aws_node_local
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:


@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -14,6 +12,8 @@ platforms:
- aws_node_local
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:


@@ -1,13 +1,15 @@
---
- name: Install Google Noto fonts for language coverage
yum:
ansible.builtin.yum:
name:
- "google-noto-*"
vars:
ansible_python_interpreter: /usr/bin/python2
- name: Link the language fonts into the JDK
# Not idiomatic, but cleaner than messing with nested lookups...
shell:
ansible.builtin.shell:
cmd: "ln -sf /usr/share/fonts/google-noto*/* {{ item }}/"
creates: "{{ item }}/NotoSansJavanese-Regular.ttf"
warn: false


@@ -1,7 +1,7 @@
---
- name: Create application directories
file:
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: 0750
@@ -17,28 +17,25 @@
# Create symlink to force single (unclustered) Confluence to store
# shared-data and attachments in the shared drive.
- name: Symlink local attachments to shared storage
file:
src: "{{ item.0 }}"
dest: "{{ item.1 }}"
ansible.builtin.file:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
force: false
state: link
mode: 0750
owner: "{{ atl_product_user }}"
group: "{{ atl_product_user }}"
vars:
- links:
- ["{{ atl_product_home_shared }}/", "{{ atl_product_home }}/shared-home"]
- ["{{ atl_product_home_shared }}/attachments/", "{{ atl_product_home }}/attachments"]
with_nested:
- "{{ links }}"
loop:
- {src: "{{ atl_product_home_shared }}/", dest: "{{ atl_product_home }}/shared-home"}
- {src: "{{ atl_product_home_shared }}/attachments/", dest: "{{ atl_product_home }}/attachments"}
- name: Create Tomcat server config
template:
ansible.builtin.template:
src: server.xml.j2
dest: "{{ atl_product_installation_versioned }}/conf/server.xml"
- name: Override JVM memory settings.
replace:
ansible.builtin.replace:
path: "{{ atl_product_installation_versioned }}/bin/setenv.sh"
regexp: "-{{ item }}\\d+m "
replace: "-{{ item }}{{ atl_jvm_heap }} "
@@ -47,30 +44,30 @@
- 'Xms'
- name: Set the Tomcat environment
lineinfile:
ansible.builtin.lineinfile:
path: "{{ atl_product_installation_versioned }}/bin/setenv.sh"
insertafter: "EOF"
line: 'export CATALINA_OPTS="${CATALINA_OPTS} {{ atl_catalina_opts }} {{ atl_catalina_opts_extra }}"'
- name: Configure login properties
template:
ansible.builtin.template:
src: seraph-config.xml.j2
dest: "{{ atl_product_installation_versioned }}/confluence/WEB-INF/classes/seraph-config.xml"
- name: Configure Confluence home directory
template:
ansible.builtin.template:
src: confluence-init.properties.j2
dest: "{{ atl_product_installation_versioned }}/confluence/WEB-INF/classes/confluence-init.properties"
- name: Create Confluence configuration
template:
ansible.builtin.template:
src: confluence.cfg.xml.j2
dest: "{{ atl_product_home }}/confluence.cfg.xml"
owner: "{{ atl_product_user }}"
group: "{{ atl_product_user }}"
- name: Limit permissions on the installation directory
file:
ansible.builtin.file:
path: "{{ atl_product_installation_versioned }}"
owner: "root"
group: "root"
@@ -83,7 +80,7 @@
changed_when: false # For Molecule idempotence check
- name: Grant access to the product working directories
file:
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: "u=rwX,g=rX,o-rwx"
@@ -97,7 +94,7 @@
changed_when: false # For Molecule idempotence check
- name: Create conf/Catalina directory owned by product so catalina.out logging works
file:
ansible.builtin.file:
path: "{{ atl_product_installation_versioned }}/conf/Catalina"
state: directory
mode: "u=rwX,g=rX,o-rwx"
@@ -106,7 +103,7 @@
changed_when: false # For Molecule idempotence check
- name: Assert baseurl to same as atl_proxy_name
postgresql_query:
community.postgresql.postgresql_query:
login_host: "{{ atl_db_host }}"
login_user: "{{ atl_jdbc_user }}"
login_password: "{{ atl_jdbc_password }}"
@@ -126,11 +123,11 @@
ignore_errors: yes # For Molecule as it has no db test framework included
- name: Create JVM font fallback directories
file:
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: 0755
with_items: "{{ atl_fonts_fallback_dirs }}"
- name: Install & configure distro language fonts
include_tasks: "{{ ansible_distribution|lower }}_fonts.yml"
ansible.builtin.include_tasks: "{{ ansible_distribution|lower }}_fonts.yml"


@@ -1,13 +1,13 @@
---
- name: Install Google Noto fonts for language coverage
package:
ansible.builtin.package:
name:
- "fonts-noto"
- name: Link the language fonts into the JDK
# Not idiomatic, but cleaner than messing with nested lookups...
shell:
ansible.builtin.shell:
cmd: "ln -sf /usr/share/fonts/truetype/noto/* {{ item }}/"
creates: "{{ item }}/NotoSansJavanese-Regular.ttf"
warn: false


@@ -1,60 +1,60 @@
---
- name: Create server config
template:
ansible.builtin.template:
src: server.xml.j2
dest: "{{ atl_product_installation_versioned }}/apache-tomcat/conf/server.xml"
- name: Set the minimum heap size (Xms)
lineinfile:
ansible.builtin.lineinfile:
path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh"
regexp: '^(.*)Xms(\d+\w)(\s.*)$'
line: '\1Xms{{ atl_jvm_heap }}\3'
backrefs: yes
- name: Set the maximum heap size (Xmx)
lineinfile:
ansible.builtin.lineinfile:
path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh"
regexp: '^(.*)Xmx(\d+\w)(\s.*)$'
line: '\1Xmx{{ atl_jvm_heap }}\3'
backrefs: yes
- name: Set Crowd home directory in crowd-init.properties file
lineinfile:
- name: Set Crowd home directory in crowd-init.properties file
ansible.builtin.lineinfile:
path: "{{ atl_product_installation_versioned }}/crowd-webapp/WEB-INF/classes/crowd-init.properties"
line: 'crowd.home={{ atl_product_home }}'
- name: Export CATALINA_OPTS in setenv.sh
lineinfile:
ansible.builtin.lineinfile:
path: '{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh'
line: 'export CATALINA_OPTS'
- name: CATALINA_OPTS to list
set_fact:
catalina_ops_list: "{{ catalina_ops_list|default([]) }} + {{ (item | trim | regex_replace('^-')).split(' -') }}"
ansible.builtin.set_fact:
catalina_ops_list: "{{ catalina_ops_list | default([]) + (item | trim | regex_replace('^-')).split(' -') }}"
loop:
- '{{ atl_catalina_opts }}'
- '{{ atl_catalina_opts_extra }}'
- name: CATALINA_OPTS unique and sorted
set_fact:
ansible.builtin.set_fact:
catalina_opts: "{{ query('flattened', catalina_ops_list) | reject('equalto', '') | unique | sort }}"
- name: Set CATALINA_OPTS in setenv.sh
blockinfile:
ansible.builtin.blockinfile:
block: "{{ lookup('template', 'templates/catalina_opts.j2') }}"
path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh"
insertbefore: "^export CATALINA_OPTS$"
marker: "# {mark} ANSIBLE MANAGED CATALINA_OPTS"
- name: Set JAVA_HOME
lineinfile:
ansible.builtin.lineinfile:
path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh"
insertafter: "EOF"
line: "export JAVA_HOME={{ atl_java_home }}"
- name: Create application directories
file:
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: 0750
@@ -67,7 +67,7 @@
changed_when: false # For Molecule idempotence check
- name: Limit permissions on the installation directory
file:
ansible.builtin.file:
path: "{{ atl_product_installation_versioned }}"
owner: "{{ atl_product_user }}"
group: "{{ atl_product_user }}"
@@ -80,7 +80,7 @@
changed_when: false # For Molecule idempotence check
- name: Grant access to the product working directories
file:
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: "u=rwX,g=rX,o-rwx"
@@ -94,7 +94,7 @@
changed_when: false # For Molecule idempotence check
- name: Create conf/Catalina directory owned by product so catalina.out logging works
file:
ansible.builtin.file:
path: "{{ atl_product_installation_versioned }}/conf/Catalina"
state: directory
mode: "u=rwX,g=rX,o-rwx"
@@ -103,7 +103,7 @@
changed_when: false # For Molecule idempotence check
- name: Symlink Crowd shared home directory
file:
ansible.builtin.file:
src: "{{ atl_product_home_shared }}"
dest: "{{ atl_product_home }}/shared"
state: link
@@ -112,7 +112,7 @@
changed_when: false # For Molecule idempotence check
- name: Assert baseurl to same as atl_proxy_name
postgresql_query:
community.postgresql.postgresql_query:
login_host: "{{ atl_db_host }}"
login_user: "{{ atl_jdbc_user }}"
login_password: "{{ atl_jdbc_password }}"
@@ -131,32 +131,32 @@
ignore_errors: yes # For Molecule as it has no db test framework included
- name: Check that crowd.cfg.xml exists
stat:
ansible.builtin.stat:
path: "{{ atl_product_home_shared }}/crowd.cfg.xml"
register: crowd_cfg_stat_result
- block:
- name: Assert JDBC password to same as atl_jdbc_password
xml:
community.general.xml:
path: "{{ atl_product_home_shared }}/crowd.cfg.xml"
xpath: "/application-configuration/properties/property[@name='hibernate.connection.password']"
value: "{{ atl_jdbc_password }}"
- name: Assert JDBC url to same as atl_jdbc_url
xml:
community.general.xml:
path: "{{ atl_product_home_shared }}/crowd.cfg.xml"
xpath: "/application-configuration/properties/property[@name='hibernate.connection.url']"
value: "{{ atl_jdbc_url }}?reWriteBatchedInserts=true"
- name: Assert hibernate.c3p0.max_size to same as atl_db_poolmaxsize
xml:
community.general.xml:
path: "{{ atl_product_home_shared }}/crowd.cfg.xml"
xpath: "/application-configuration/properties/property[@name='hibernate.c3p0.max_size']"
value: "{{ atl_db_poolmaxsize }}"
when: atl_db_poolmaxsize is defined
- name: Assert hibernate.c3p0.min_size to same as atl_db_poolminsize
xml:
community.general.xml:
path: "{{ atl_product_home_shared }}/crowd.cfg.xml"
xpath: "/application-configuration/properties/property[@name='hibernate.c3p0.min_size']"
value: "{{ atl_db_poolminsize }}"
@@ -165,6 +165,6 @@
when: crowd_cfg_stat_result.stat.exists
- name: Remove crowd.xml to prevent duplicates from appearing in cluster reporting
file:
ansible.builtin.file:
path: "{{ atl_product_installation_versioned }}/apache-tomcat/conf/Catalina/localhost/crowd.xml"
state: absent


@@ -1,7 +1,7 @@
---
- name: Create application DB user
postgresql_user:
community.postgresql.postgresql_user:
login_host: "{{ atl_db_host }}"
login_user: "{{ atl_db_root_user }}"
login_password: "{{ atl_db_root_password }}"
@@ -11,7 +11,7 @@
expires: 'infinity'
- name: Collect dbcluster db_names
postgresql_query:
community.postgresql.postgresql_query:
login_host: "{{ atl_db_host }}"
login_user: "{{ atl_db_root_user }}"
login_password: "{{ atl_db_root_password }}"
@@ -22,7 +22,7 @@
- block:
- name: Update root privs for new user
postgresql_privs:
community.postgresql.postgresql_privs:
login_host: "{{ atl_db_host }}"
login_user: "{{ atl_db_root_user }}"
login_password: "{{ atl_db_root_password }}"
@@ -33,7 +33,7 @@
# RDS does not allow changing the collation on an existing DB; it only allows the collation to be set when the DB is created. If the DB already exists, we need the “create new application database” task to be skipped; idempotence cannot be relied upon as we can't be certain of the collation of the existing DB
- name: Create new application database
postgresql_db:
community.postgresql.postgresql_db:
login_host: "{{ atl_db_host }}"
login_user: "{{ atl_db_root_user }}"
login_password: "{{ atl_db_root_password }}"
@@ -45,13 +45,13 @@
lc_ctype: "{{ atl_jdbc_ctype }}"
template: "{{ atl_jdbc_template }}"
register: db_created
when: "atl_jdbc_db_name not in (dbcluster_db_names.query_result | map(attribute='datname') )"
when: "atl_jdbc_db_name not in (dbcluster_db_names.query_result | map(attribute='datname'))"
tags:
- new_only
- name: Assert ownership of public schema
postgresql_query:
community.postgresql.postgresql_query:
login_host: "{{ atl_db_host }}"
login_user: "{{ atl_db_root_user }}"
login_password: "{{ atl_db_root_password }}"
@@ -59,7 +59,7 @@
query: "ALTER SCHEMA public OWNER to {{ atl_db_root_user }};"
- name: Grant privs to root user on public schema
postgresql_query:
community.postgresql.postgresql_query:
login_host: "{{ atl_db_host }}"
login_user: "{{ atl_db_root_user }}"
login_password: "{{ atl_db_root_password }}"
@@ -67,7 +67,7 @@
query: "GRANT ALL ON SCHEMA public TO {{ atl_db_root_user }};"
- name: Grant privs to application user on public schema
postgresql_query:
community.postgresql.postgresql_query:
login_host: "{{ atl_db_host }}"
login_user: "{{ atl_db_root_user }}"
login_password: "{{ atl_db_root_password }}"


@@ -20,12 +20,12 @@
pre_tasks:
- name: Create base dir
file:
ansible.builtin.file:
path: '/opt/atlassian/bin'
state: directory
- name: Install git
package:
ansible.builtin.package:
name: git
roles:


@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -18,6 +16,8 @@ platforms:
- nofile:262144:262144
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:


@@ -2,19 +2,19 @@
- name: Fetch the DIY backups repository
git:
ansible.builtin.git:
repo: "{{ atl_diy_backup_repo }}"
dest: "{{ atl_diy_backup_dir }}"
version: "master"
- name: Configure DIY backup for BB on AWS
template:
ansible.builtin.template:
src: "bitbucket.diy-backup.vars.sh.j2"
dest: "{{ atl_diy_backup_dir }}/bitbucket.diy-backup.vars.sh"
mode: 0640
- name: Install backup wrapper script
template:
ansible.builtin.template:
src: "run-backup.j2"
dest: "{{ atl_installation_base }}/bin/run-backup"
mode: 0750


@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -18,6 +16,8 @@ platforms:
- nofile:262144:262144
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:


@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -18,6 +16,8 @@ platforms:
- nofile:262144:262144
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:


@@ -30,11 +30,11 @@
pre_tasks:
- name: Create shared home
file:
ansible.builtin.file:
path: '/media/atl/jira/shared/'
state: directory
- name: Create jira-config.properties to check copy
copy:
ansible.builtin.copy:
dest: '/media/atl/jira/shared/jira-config.properties'
content: "jira.projectkey.warning = testwarning"
force: false # For idempotency check


@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -18,6 +16,8 @@ platforms:
- nofile:262144:262144
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:


@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -18,6 +16,8 @@ platforms:
- nofile:262144:262144
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:


@@ -2,29 +2,29 @@
- name: Create database config
template:
ansible.builtin.template:
src: dbconfig.xml.j2
dest: "{{ atl_product_home }}/dbconfig.xml"
owner: "{{ atl_product_user }}"
- name: Create cluster config
template:
ansible.builtin.template:
src: cluster.properties.j2
dest: "{{ atl_product_home }}/cluster.properties"
owner: "{{ atl_product_user }}"
- name: Create server config
template:
ansible.builtin.template:
src: server.xml.j2
dest: "{{ atl_product_installation_versioned }}/conf/server.xml"
- name: Check for a jira-config.properties in the shared home
stat:
ansible.builtin.stat:
path: "{{ atl_product_home_shared }}/jira-config.properties"
register: jira_config_properties
- name: Copy jira-config.properties if exists
copy:
ansible.builtin.copy:
remote_src: true
src: "{{ atl_product_home_shared }}/jira-config.properties"
dest: "{{ atl_product_home }}/jira-config.properties"
@@ -35,7 +35,7 @@
- name: Override JVM memory settings.
# Ugly but necessary as the product installs this file so we need to make the change here.
lineinfile:
ansible.builtin.lineinfile:
path: "{{ atl_product_installation_versioned }}/bin/setenv.sh"
backrefs: true
regexp: "^{{ item }}="
@@ -45,25 +45,25 @@
- 'JVM_MAXIMUM_MEMORY'
- name: Set Jira home directory
lineinfile:
ansible.builtin.lineinfile:
path: "{{ atl_product_installation_versioned }}/bin/setenv.sh"
regexp: "JIRA_HOME="
line: 'JIRA_HOME="{{ atl_product_home }}"'
- name: Set the Tomcat environment
lineinfile:
ansible.builtin.lineinfile:
path: "{{ atl_product_installation_versioned }}/bin/setenv.sh"
insertafter: "EOF"
line: 'export CATALINA_OPTS="${CATALINA_OPTS} {{ atl_catalina_opts }} {{ atl_catalina_opts_extra }}"'
- name: Set support recommended JVM args
lineinfile:
ansible.builtin.lineinfile:
path: "{{ atl_product_installation_versioned }}/bin/setenv.sh"
regexp: "JVM_SUPPORT_RECOMMENDED_ARGS="
line: 'JVM_SUPPORT_RECOMMENDED_ARGS="{{ atl_jvm_opts }}"'
- name: Create application directories
file:
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: 0750
@@ -77,7 +77,7 @@
- name: Limit permissions on the installation directory
file:
ansible.builtin.file:
path: "{{ atl_product_installation_versioned }}"
owner: "root"
group: "root"
@@ -90,7 +90,7 @@
changed_when: false # For Molecule idempotence check
- name: Grant access to the product working directories
file:
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: "u=rwX,g=rX,o-rwx"
@@ -104,7 +104,7 @@
changed_when: false # For Molecule idempotence check
- name: Create conf/Catalina directory owned by product so catalina.out logging works
file:
ansible.builtin.file:
path: "{{ atl_product_installation_versioned }}/conf/Catalina"
state: directory
mode: "u=rwX,g=rX,o-rwx"
@@ -113,7 +113,7 @@
changed_when: false # For Molecule idempotence check
- name: Assert baseurl to same as atl_proxy_name
postgresql_query:
community.postgresql.postgresql_query:
login_host: "{{ atl_db_host }}"
login_user: "{{ atl_jdbc_user }}"
login_password: "{{ atl_jdbc_password }}"


@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -14,6 +12,8 @@ platforms:
- aws_node_local
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
inventory:
links:
group_vars: ../../../../group_vars/


@@ -1,16 +1,18 @@
---
- name: Install Amazon-Linux-specific support packages
yum:
ansible.builtin.yum:
name:
- dejavu-sans-fonts
- file
- git-{{ git_version }}
- libxml2
- shadow-utils
vars:
ansible_python_interpreter: /usr/bin/python2
- name: Limit the SSH ciphers
lineinfile:
ansible.builtin.lineinfile:
path: "/etc/ssh/sshd_config"
# Drop insecure ciphers, currently 3des-cbc only. You can get the
# full list with `sshd -T | grep -i ciphers`
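
The hunk above is cut off before this task's regexp/line arguments. Purely as an illustration of how such a cipher restriction is commonly expressed with ansible.builtin.lineinfile, the cipher list below is an assumption (current OpenSSH defaults minus 3des-cbc), not the repository's actual value:

    # Sketch only; the real task's regexp/line values are not shown in this diff.
    - name: Limit the SSH ciphers
      ansible.builtin.lineinfile:
        path: /etc/ssh/sshd_config
        regexp: '^#?Ciphers\s'
        line: "Ciphers chacha20-poly1305@openssh.com,aes256-gcm@openssh.com,aes128-gcm@openssh.com,aes256-ctr,aes192-ctr,aes128-ctr"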

View File

@@ -3,32 +3,31 @@
# Note: Try and limit these to packages that are distro-specific, and
# place commonly-named ones below.
- name: Install distro-specific prerequisites
include_tasks: "{{ ansible_distribution|lower }}.yml"
ansible.builtin.include_tasks: "{{ ansible_distribution|lower }}.yml"
- name: Install common support packages
package:
ansible.builtin.package:
name:
- jq
- tar
- curl
- unzip
- fontconfig
- python-psycopg2
- name: Create product group
group:
ansible.builtin.group:
name: "{{ atl_product_user }}"
gid: "{{ atl_product_user_uid }}"
- name: Create product user
user:
ansible.builtin.user:
name: "{{ atl_product_user }}"
uid: "{{ atl_product_user_uid }}"
group: "{{ atl_product_user }}"
comment: "Product runtime user"
- name: Stop systemd-cleanup deleting the jvm socket file
copy:
ansible.builtin.copy:
src: java.conf
dest: "/usr/lib/tmpfiles.d/java.conf"
owner: root
@@ -37,7 +36,7 @@
register: systemd_config_changed
- name: Force systemd to reload daemon configuration
systemd:
ansible.builtin.systemd:
daemon_reload: yes
when:
- systemd_config_changed is defined
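
The "Stop systemd-cleanup deleting the jvm socket file" task above installs a java.conf into /usr/lib/tmpfiles.d/ so that systemd-tmpfiles stops pruning the JVM's files under /tmp. The contents of that file are not part of this diff; assuming it protects the usual JVM paths (attach sockets and hsperfdata directories), an inlined equivalent might look like this sketch:

    # Hypothetical equivalent of shipping java.conf; the excluded paths are an
    # assumption, not the repository's actual file contents.
    - name: Stop systemd-tmpfiles cleaning JVM files in /tmp
      ansible.builtin.copy:
        dest: /usr/lib/tmpfiles.d/java.conf
        owner: root
        mode: "0644"
        content: |
          x /tmp/hsperfdata_*
          x /tmp/.java_pid*
      register: systemd_config_changed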

View File

@@ -1,9 +1,8 @@
---
- name: Install common Ubuntu support packages
apt:
ansible.builtin.apt:
name:
- python3-psycopg2
- libxml2-utils
- git
- fontconfig

View File

@@ -1,7 +1,7 @@
---
- name: Create mountpoint
file:
ansible.builtin.file:
state: directory
path: "{{ atl_shared_mountpoint }}"
mode: 0755
@@ -9,7 +9,7 @@
group: "{{ atl_product_user }}"
- name: Enable mountpoint in fstab
mount:
ansible.posix.mount:
src: "{{ atl_fileserver_host }}:{{ atl_nfs_target }}"
path: "{{ atl_nfs_mountpoint }}"
fstype: nfs

View File

@@ -1,6 +1,6 @@
---
- name: Restart NFS
service:
ansible.builtin.service:
name: "nfs.service"
state: restarted

View File

@@ -1,6 +1,8 @@
---
- name: Install Amazon-Linux-specific NFS packages
yum:
ansible.builtin.yum:
name:
- nfs-utils
vars:
ansible_python_interpreter: /usr/bin/python2

View File

@@ -1,16 +1,16 @@
---
- name: Install distro-specific NFS packages
include_tasks: "{{ ansible_distribution|lower }}.yml"
ansible.builtin.include_tasks: "{{ ansible_distribution|lower }}.yml"
- name: Create mountpoint
file:
ansible.builtin.file:
path: "{{ atl_shared_mountpoint }}"
state: directory
- name: Setup the disk partition
parted:
community.general.parted:
device: "{{ atl_nfs_server_device }}"
label: gpt
name: "{{ atl_nfs_fs_label }}"
@@ -22,7 +22,7 @@
- name: Create the filesystem
filesystem:
community.general.filesystem:
dev: "{{ atl_nfs_server_device }}"
fstype: "{{ atl_nfs_fs_type }}"
opts: "-L {{ atl_nfs_fs_label }}"
@@ -30,7 +30,7 @@
- new_only
- name: Setup fstab and mount the filesystem
mount:
ansible.posix.mount:
path: "{{ atl_shared_mountpoint }}"
src: "LABEL={{ atl_nfs_fs_label }}"
fstype: "{{ atl_nfs_fs_type }}"
@@ -40,7 +40,7 @@
- name: Create the shared home
file:
ansible.builtin.file:
path: "{{ atl_shared_mountpoint }}/{{ atl_product_user }}/shared"
state: directory
owner: "{{ atl_product_user }}"
@@ -51,14 +51,14 @@
- name: Create the NFS export file
template:
ansible.builtin.template:
src: "media-atl.exports.j2"
dest: "/etc/exports.d/20-media-atl.exports"
notify:
- Restart NFS
- name: Enable NFS
service:
ansible.builtin.service:
name: nfs.service
enabled: true
state: started

View File

@@ -1,7 +1,7 @@
---
- name: Install Ubuntu-specific NFS packages
apt:
ansible.builtin.apt:
name:
- nfs-kernel-server
- libnfs-utils

View File

@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -10,5 +8,7 @@ platforms:
image: ubuntu:bionic
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
verifier:
name: testinfra

View File

@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -10,5 +8,7 @@ platforms:
image: ubuntu:bionic
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
verifier:
name: testinfra

View File

@@ -4,7 +4,7 @@
block:
- name: Add Adoptium yum repository
yum_repository:
ansible.builtin.yum_repository:
name: Adoptium
file: adoptium
description: Adoptium Repo
@@ -14,22 +14,26 @@
state: present
- name: Install Eclipse Temurin JDK
yum:
ansible.builtin.yum:
name: "temurin-{{ java_major_version }}-jdk"
state: present
vars:
ansible_python_interpreter: /usr/bin/python2
- name: Ensure common JDK symlink exists
alternatives:
community.general.alternatives:
link: "/usr/lib/jvm/java"
name: "java_sdk"
path: "/usr/lib/jvm/temurin-{{ java_major_version }}-jdk"
priority: 99
when: atl_use_system_jdk
when: atl_use_system_jdk | bool
tags:
- runtime_pkg
- name: Install other base packages on Amazon Linux
yum:
ansible.builtin.yum:
name:
- dejavu-fonts-common # Required by the installer
vars:
ansible_python_interpreter: /usr/bin/python2

View File

@@ -1,4 +1,4 @@
---
- name: Perform distro-specific tasks
include_tasks: "{{ ansible_distribution|lower }}.yml"
ansible.builtin.include_tasks: "{{ ansible_distribution|lower }}.yml"

View File

@@ -4,39 +4,39 @@
block:
- name: Install gnupg
apt:
ansible.builtin.apt:
name: gnupg
state: present
- name: Add Adoptium debian repo public key
apt_key:
ansible.builtin.apt_key:
url: https://packages.adoptium.net/artifactory/api/gpg/key/public
state: present
- name: Add Adoptium debian repository
apt_repository:
ansible.builtin.apt_repository:
repo: "deb https://packages.adoptium.net/artifactory/deb {{ ansible_distribution_release }} main"
state: present
filename: adoptium
- name: Install Eclipse Temurin JDK
apt:
ansible.builtin.apt:
name: "temurin-{{ java_major_version }}-jdk"
update_cache: yes
state: present
- name: Ensure common JDK symlink exists
alternatives:
community.general.alternatives:
link: "/usr/lib/jvm/java"
name: "java_sdk"
path: "/usr/lib/jvm/temurin-{{ java_major_version }}-jdk-{{ debian_architecture }}"
priority: 99
when: atl_use_system_jdk
when: atl_use_system_jdk | bool
tags:
- runtime_pkg
- name: Install other base packages on Ubuntu
package:
ansible.builtin.package:
name:
- fonts-dejavu-core # Required by installer

View File

@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -14,6 +12,8 @@ platforms:
- aws_node_local
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:

View File

@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -14,6 +12,8 @@ platforms:
- aws_node_local
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:

View File

@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -14,6 +12,8 @@ platforms:
- aws_node_local
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:

View File

@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -14,6 +12,8 @@ platforms:
- aws_node_local
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:

View File

@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -14,6 +12,8 @@ platforms:
- aws_node_local
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:

View File

@@ -16,11 +16,11 @@
atl_jdbc_template: 'template0'
pre_tasks:
- name: Create cache dir
file:
ansible.builtin.file:
path: '/media/atl/jira/shared/'
state: directory
- name: Seed version
copy:
ansible.builtin.copy:
dest: '/media/atl/jira/shared/jira-core.version'
content: "8.14.0"
force: false # For idempotency check

View File

@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -14,6 +12,8 @@ platforms:
- aws_node_local
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:

View File

@@ -10,11 +10,11 @@
pre_tasks:
- name: Create cache dir
file:
ansible.builtin.file:
path: '/media/atl/jira/shared/'
state: directory
- name: Seed version
copy:
ansible.builtin.copy:
dest: '/media/atl/jira/shared/jira-core.version'
content: "7.10.2"
force: false # For idempotency check

View File

@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -14,6 +12,8 @@ platforms:
- aws_node_local
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
vvv: true
skip-tags: runtime_pkg

View File

@@ -10,11 +10,11 @@
pre_tasks:
- name: Create cache dir
file:
ansible.builtin.file:
path: '/media/atl/jira/shared/'
state: directory
- name: Seed version
copy:
ansible.builtin.copy:
dest: '/media/atl/jira/shared/jira-core.version'
content: "7.9.0"
force: false # For idempotency check

View File

@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -14,6 +12,8 @@ platforms:
- aws_node_local
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
vvv: true
skip-tags: runtime_pkg

View File

@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -14,6 +12,8 @@ platforms:
- aws_node_local
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:

View File

@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -14,6 +12,8 @@ platforms:
- aws_node_local
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:

View File

@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -14,6 +12,8 @@ platforms:
- aws_node_local
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:

View File

@@ -10,11 +10,11 @@
pre_tasks:
- name: Create cache dir
file:
ansible.builtin.file:
path: '/media/atl/jira/shared/'
state: directory
- name: Seed version
copy:
ansible.builtin.copy:
dest: '/media/atl/jira/shared/jira-core.version'
content: "7.9.0"
force: false # For idempotency check

View File

@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -14,6 +12,8 @@ platforms:
- aws_node_local
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:

View File

@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -14,6 +12,8 @@ platforms:
- aws_node_local
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:

View File

@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -14,6 +12,8 @@ platforms:
- aws_node_local
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:

View File

@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -14,6 +12,8 @@ platforms:
- aws_node_local
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
vv: true
skip-tags: runtime_pkg

View File

@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -14,6 +12,8 @@ platforms:
- aws_node_local
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
vv: true
skip-tags: runtime_pkg

View File

@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -14,6 +12,8 @@ platforms:
- aws_node_local
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
vv: true
skip-tags: runtime_pkg

View File

@@ -1,9 +1,9 @@
---
- name: Fetch the latest version from URL
set_fact:
ansible.builtin.set_fact:
atl_product_version_json: "{{ lookup('url', 'https://marketplace.atlassian.com/rest/2/products/key/jira-software/versions') }}"
- name: Set the local var to retrieved version
set_fact:
ansible.builtin.set_fact:
atl_latest_version: "{{ atl_product_version_json._embedded.versions[0].name }}"

View File

@@ -1,7 +1,7 @@
---
- name: Check for alternate obr download url
set_fact:
ansible.builtin.set_fact:
atl_source_obr_from_marketplace: false
when:
- atl_obr_download_url is defined
@@ -10,40 +10,40 @@
block:
- name: Marketplace OBR - Get the installer product version info
uri:
ansible.builtin.uri:
url: "{{ atl_mpac_products }}/key/jira/versions/name/{{ atl_product_version }}"
return_content: yes
register: atl_product_version_info
- name: Marketplace OBR - Show the returned build number
debug:
ansible.builtin.debug:
msg="buildNumber={{ atl_product_version_info.json.buildNumber }}"
- name: Marketplace OBR - Get the JSD build version info
uri:
ansible.builtin.uri:
url: "{{ atl_mpac_products }}/key/jira-servicedesk/versions/latest?application=\
jira&applicationBuild={{ atl_product_version_info.json.buildNumber }}"
return_content: yes
register: atl_jsd_build_info
- name: Marketplace OBR - Show the returned obr binary href
debug:
ansible.builtin.debug:
msg="obr_ref={{ atl_jsd_build_info.json._embedded.artifact._links.binary.href }}"
- name: Marketplace OBR - Set atl_obr_download_url
set_fact:
ansible.builtin.set_fact:
atl_obr_download_url: "{{ atl_jsd_build_info.json._embedded.artifact._links.binary.href }}"
- name: Marketplace OBR - Set atl_jsd_build
set_fact:
ansible.builtin.set_fact:
atl_jsd_build: "{{ atl_jsd_build_info.json.name }}"
- name: Marketplace OBR - Show the obr filename
debug:
ansible.builtin.debug:
msg="obr_name=jira-servicedesk-application-{{ atl_jsd_build }}.obr"
- name: Marketplace OBR - Set the obr filename
set_fact:
ansible.builtin.set_fact:
atl_obr_filename: "jira-servicedesk-application-{{ atl_jsd_build }}.obr"
when:
@@ -56,18 +56,18 @@
block:
- name: Alternate URL OBR - Show the obr filename
debug:
ansible.builtin.debug:
msg="obr_name=jira-servicedesk-application-{{ atl_jsd_build }}.obr"
- name: Alternate OBR - Set the obr filename
set_fact:
ansible.builtin.set_fact:
atl_obr_filename: "jira-servicedesk-application-{{ atl_jsd_build }}.obr"
when:
- not atl_source_obr_from_marketplace | bool
- name: is shared_home set?
debug:
ansible.builtin.debug:
msg="atl_product_home_shared_download_dir={{ atl_product_home_shared_download_dir }}"
# For the first run a temp obr should be downloaded but moved to
@@ -77,7 +77,7 @@
# a directory is used as a lockfile (atomic operation) when moving obr.
- name: Set assumptions to avoid race condition
set_fact:
ansible.builtin.set_fact:
download_obr: true
move_obr: false
atl_obr_download: "{{ atl_installer_temp }}/{{ atl_obr_filename }}"
@@ -87,22 +87,22 @@
# Check for pre-downloaded obr on shared_home and completed lock dir.
- name: Check for completed lock directory
stat:
ansible.builtin.stat:
path: "{{ atl_obr_completed_lock }}"
register: completed_lock
- name: Check for obr in home_shared
stat:
ansible.builtin.stat:
path: "{{ atl_obr_shared_download }}"
register: home_shared_download
- name: debug home_shared_download
debug:
ansible.builtin.debug:
var: home_shared_download
# If obr exists and lockdir exists use this obr instead
- name: Check lock directory and obr exists on shared_home
set_fact:
ansible.builtin.set_fact:
download_obr: false
atl_obr_download: "{{ atl_obr_shared_download }}"
when:
@@ -114,14 +114,14 @@
# Fetch obr if required - note we validate it by mimetype rather than checksum due to https://ecosystem.atlassian.net/browse/AMKT-25526
- name: download_obr is true so fetch and do all the things
block:
- debug:
- ansible.builtin.debug:
var: atl_obr_download_url
- debug:
- ansible.builtin.debug:
var: atl_obr_download
# Fetch obr and copy to temp
- name: Fetch obr
get_url:
ansible.builtin.get_url:
url: "{{ atl_obr_download_url }}"
dest: "{{ atl_obr_download }}"
mode: 0755
@@ -130,12 +130,12 @@
register: atl_obr_completed
- name: Confirm the output from the download task
debug:
ansible.builtin.debug:
var: atl_obr_completed
# get details about the obr
- name: Stat the new obr file
stat:
ansible.builtin.stat:
path: "{{ atl_obr_completed.dest }}"
get_mime: yes
register: atl_obr_stats
@@ -143,14 +143,14 @@
- atl_obr_completed.dest is defined
- name: fail if the downloaded OBR is not a zip file
fail:
ansible.builtin.fail:
msg: "The downloaded OBR was not detected as being a valid ZIP file: {{ atl_obr_stats }}"
when:
- (atl_obr_stats.stat.mimetype is not defined) or (atl_obr_stats.stat.mimetype is not match("application/zip"))
# If obr was fetched make the lock directory
- name: Create moving_lock.
file:
ansible.builtin.file:
path: "{{ atl_obr_moving_lock }}"
state: directory
when:
@@ -160,7 +160,7 @@
# Directory lock was created by this run?
# If so, then set a fact intending to move obr
- name: Move obr Scenario - lock created by this run
set_fact:
ansible.builtin.set_fact:
move_obr: true
when:
- moving_lock_created is succeeded
@@ -168,28 +168,28 @@
# Otherwise directory lock was either already created or
# could not be created. The fallback is to continue and install from temp
when: download_obr
when: download_obr | bool
# If the intention is to move obr to home_shared
- name: Move obr to home_shared
block:
- name: Copy temp installer to home_shared
copy:
ansible.builtin.copy:
src: "{{ atl_obr_download }}"
dest: "{{ atl_obr_shared_download }}"
remote_src: true
register: copied
- name: Create completed_lock once obr downloaded and copied
file:
ansible.builtin.file:
path: "{{ atl_obr_completed_lock }}"
state: directory
when: copied is succeeded
register: completed_lock_created
- name: Remove moving_lock to show that obr is completed
file:
ansible.builtin.file:
path: "{{ atl_obr_moving_lock }}"
state: absent
when:
@@ -198,24 +198,24 @@
register: moving_lock_removed
- name: Delete old temp installer
file:
ansible.builtin.file:
path: "{{ atl_obr_download }}"
state: absent
when: moving_lock_removed is succeeded
register: temp_deleted
- name: Set install to home_shared location
set_fact:
ansible.builtin.set_fact:
atl_obr_download: "{{ atl_obr_shared_download }}"
when: temp_deleted is succeeded
when: move_obr
when: move_obr | bool
# At this point the binary is in {{ atl_obr_download }}
# (which is either on home_shared or temp)
- name: Ensure installed-plugins dir exists
file:
ansible.builtin.file:
path: "{{ atl_product_home_shared }}/plugins/installed-plugins"
state: directory
mode: 0750
@@ -224,7 +224,7 @@
# Note: as ansible unarchive can't handle "-j" (junk paths) we need to ignore errors to bypass the path verification
- name: Unpack the obr into the atlassian-bundled-plugins dir
unarchive:
ansible.builtin.unarchive:
remote_src: yes
src: "{{ atl_obr_download }}"
dest: "{{ atl_product_installation_versioned }}/atlassian-jira/WEB-INF/atlassian-bundled-plugins"
@@ -238,15 +238,15 @@
mode: 0644
register: obr_unpack
- name: Move JSD dependency jars into the bundled-plugins dir # noqa 503 - ignore lint info about when changed
copy:
- name: Move JSD dependency jars into the bundled-plugins dir # noqa no-handler - ignore lint info about when changed
ansible.builtin.copy:
remote_src: yes
src: "{{ atl_product_installation_versioned }}/atlassian-jira/WEB-INF/atlassian-bundled-plugins/dependencies/"
dest: "{{ atl_product_installation_versioned }}/atlassian-jira/WEB-INF/atlassian-bundled-plugins/"
when: obr_unpack.changed
- name: Remove the empty dependencies folder # noqa 503 - ignore lint info about when changed
file:
- name: Remove the empty dependencies folder # noqa no-handler - ignore lint info about when changed
ansible.builtin.file:
path: "{{ atl_product_installation_versioned }}/atlassian-jira/WEB-INF/atlassian-bundled-plugins/dependencies"
state: absent
when: obr_unpack.changed
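
The moving_lock / completed_lock choreography above (and the near-identical one used for the product installer later in this diff) relies on directory creation being atomic: only the node that actually creates the lock directory copies the download into the shared home, and any node that later finds the completed lock skips the download and uses the shared copy instead. A minimal sketch of that general pattern, with placeholder paths rather than the repository's variables:

    # Illustrative sketch of the directory-as-lock pattern; paths are placeholders.
    - name: Try to create the moving lock (mkdir is atomic)
      ansible.builtin.file:
        path: /media/atl/example/.moving_lock
        state: directory
      register: moving_lock

    # Only the run that actually created the lock directory performs the copy.
    - name: Copy the artefact into the shared home
      ansible.builtin.copy:
        src: /tmp/example-artefact.bin
        dest: /media/atl/example/example-artefact.bin
        remote_src: true
      when: moving_lock is changed

    - name: Record completion, then release the lock
      ansible.builtin.file:
        path: "{{ item.path }}"
        state: "{{ item.state }}"
      loop:
        - { path: /media/atl/example/.completed_lock, state: directory }
        - { path: /media/atl/example/.moving_lock, state: absent }
      when: moving_lock is changed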

View File

@@ -1,5 +1,5 @@
---
- name: Set the download edition for ServiceDesk
set_fact:
ansible.builtin.set_fact:
atl_download_edition: "servicedesk"

View File

@@ -1,9 +1,9 @@
---
- name: Fetch the latest version from URL
set_fact:
ansible.builtin.set_fact:
atl_product_version_json: "{{ lookup('url', 'https://marketplace.atlassian.com/rest/2/products/key/jira-servicedesk/versions') }}"
- name: Set the local var to retrieved version
set_fact:
ansible.builtin.set_fact:
atl_latest_version: "{{ atl_product_version_json._embedded.versions[0].name }}"

View File

@@ -1,9 +1,9 @@
---
- name: Fetch the latest version from URL
set_fact:
ansible.builtin.set_fact:
atl_product_version_json: "{{ lookup('url', 'https://marketplace.atlassian.com/rest/2/products/key/jira-software/versions') }}"
- name: Set the local var to retrieved version
set_fact:
ansible.builtin.set_fact:
atl_latest_version: "{{ atl_product_version_json._embedded.versions[0].name }}"

View File

@@ -2,7 +2,7 @@
- name: Check for existing version cache file
stat:
ansible.builtin.stat:
path: "{{ atl_product_version_cache }}"
register: cached
@@ -11,19 +11,19 @@
block:
- name: Read cached version from file
command: "cat {{ atl_product_version_cache }}"
ansible.builtin.command: "cat {{ atl_product_version_cache }}"
register: atl_product_version_file
changed_when: false
- name: Set the local var to cached version
set_fact:
ansible.builtin.set_fact:
atl_cached_version: "{{ atl_product_version_file.stdout }}"
when: cached.stat.exists
- name: Determine if requested version is 'latest'
set_fact:
ansible.builtin.set_fact:
version_is_latest: "{{ atl_product_version is undefined or
not atl_product_version or
atl_product_version == 'latest' }}"
@@ -33,9 +33,9 @@
block:
- name: Fetch the latest edition version
include_tasks: "{{ atl_product_edition }}_version_latest.yml"
ansible.builtin.include_tasks: "{{ atl_product_edition }}_version_latest.yml"
when: not cached.stat.exists and version_is_latest
when: not cached.stat.exists and version_is_latest | bool
######################################################################
@@ -63,64 +63,63 @@
block:
- name: "Case: Cached version exists, has precedence over 'latest'"
set_fact:
ansible.builtin.set_fact:
atl_download_version: "{{ atl_cached_version }}"
when: cached.stat.exists
- name: "Case: No cached version, use latest"
set_fact:
ansible.builtin.set_fact:
atl_download_version: "{{ atl_latest_version }}"
when: not cached.stat.exists
when: version_is_latest
when: version_is_latest | bool
- name: "Case: Version is not latest"
block:
- name: "create atlassian z versioning for comparison"
set_fact:
ansible.builtin.set_fact:
atl_z_product_version: "{{ atl_product_version ~ '-z' }}"
atl_z_cached_version: "{{ atl_cached_version ~ '-z' }}"
- name: "create atlassian ordered versioning for comparison"
set_fact:
ansible.builtin.set_fact:
atl_product_normalised_version: "{{ atl_z_product_version | replace('-m', '-am') }}"
atl_cached_normalised_version: "{{ atl_z_cached_version | replace('-m', '-am') }}"
- name: "Case: No cached version, or supplied is higher; use supplied"
set_fact:
ansible.builtin.set_fact:
atl_download_version: "{{ atl_product_version }}"
when: force_version_update | bool or
not cached.stat.exists or
atl_product_normalised_version is version(atl_cached_normalised_version, '>')
- name: "Case: Cached version is higher or forced, ignore supplied"
set_fact:
ansible.builtin.set_fact:
atl_download_version: "{{ atl_cached_version }}"
when: cached.stat.exists and
atl_product_normalised_version is version(atl_cached_normalised_version, '<=') and
not force_version_update | bool
when: not version_is_latest
when: not version_is_latest | bool
- name: "Fallthrough guard: Use cached or supplied version if nothing set"
set_fact:
ansible.builtin.set_fact:
atl_download_version: "{{ atl_cached_version or atl_product_version }}"
when: atl_download_version is not defined or
atl_download_version|length == 0
atl_download_version | length == 0
- name: Override the supplied version with the calculated one
set_fact:
ansible.builtin.set_fact:
atl_product_version: "{{ atl_download_version }}"
######################################################################
- name: Perform any additional per-edition version setup
include_tasks: "{{ atl_product_edition }}_extra_tasks.yml"
ansible.builtin.include_tasks: "{{ atl_product_edition }}_extra_tasks.yml"
- name: Create installation directories
file:
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: 0750
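
The "atlassian z versioning" hunk above appends '-z' to both version strings and rewrites '-m' markers before comparing them with Jinja's version test. With the default loose comparison, a milestone such as 8.20.0-m0005 would otherwise rank above the plain 8.20.0 release simply because it has extra components; the '-z' suffix gives both strings a trailing component so the release wins, and the '-m' to '-am' substitution adjusts how milestone markers sort against other suffixes. A standalone illustration of the mechanics, using made-up version strings:

    # Illustration of the normalisation trick; the version strings are examples only.
    - name: Normalise a supplied and a cached version
      ansible.builtin.set_fact:
        supplied_norm: "{{ ('8.20.0' ~ '-z') | replace('-m', '-am') }}"
        cached_norm: "{{ ('8.20.0-m0005' ~ '-z') | replace('-m', '-am') }}"

    - name: The plain release should outrank the milestone build
      ansible.builtin.debug:
        msg: "supplied is newer: {{ supplied_norm is version(cached_norm, '>') }}"
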
@@ -136,7 +135,7 @@
# At this point atl_product_version should be set, cache if necessary.
- name: Write override cached version when specified
template:
ansible.builtin.template:
src: version.j2
dest: "{{ atl_product_version_cache }}"
force: true
@@ -148,25 +147,25 @@
# a directory is used as a lockfile (atomic operation) when moving binary.
- name: Set assumptions to avoid race condition
set_fact:
ansible.builtin.set_fact:
download_binary: true
move_binary: false
atl_product_download: "{{ atl_product_temp_download }}"
# Check for pre-downloaded binary on shared_home and completed lock dir.
- name: Check for completed lock directory
stat:
ansible.builtin.stat:
path: "{{ atl_product_home_shared_completed_lock }}"
register: completed_lock
- name: Check for product installer in home_shared
stat:
ansible.builtin.stat:
path: "{{ atl_product_home_shared_download }}"
register: home_shared_download
# If binary exists and lockdir exists use this binary instead
- name: Check lock directory and binary exists on shared_home
set_fact:
ansible.builtin.set_fact:
download_binary: false
atl_product_download: "{{ atl_product_home_shared_download }}"
when:
@@ -180,7 +179,7 @@
# Fetch binary and copy to temp
- name: Fetch binary
get_url:
ansible.builtin.get_url:
url: "{{ atl_product_download_url }}"
dest: "{{ atl_product_temp_download }}"
mode: 0755
@@ -189,7 +188,7 @@
# If product installer was fetched make the lock directory
- name: Create moving_lock.
file:
ansible.builtin.file:
path: "{{ atl_product_home_shared_moving_lock }}"
state: directory
when:
@@ -199,7 +198,7 @@
# Directory lock was created by this run?
# If so, then set a fact intending to move binary
- name: Move binary Scenario - lock created by this run
set_fact:
ansible.builtin.set_fact:
move_binary: true
when:
- moving_lock_created is succeeded
@@ -207,14 +206,14 @@
# Otherwise directory lock was either already created or
# could not be created. The fallback is to continue and install from temp
when: download_binary
when: download_binary | bool
# If the intention is to move binary to home_shared
- name: Move product installer to home_shared
block:
- name: Copy temp installer to home_shared
copy:
ansible.builtin.copy:
src: "{{ atl_product_temp_download }}"
dest: "{{ atl_product_home_shared_download }}"
remote_src: true
@@ -224,14 +223,14 @@
register: copied
- name: Create completed_lock once product installer downloaded and copied
file:
ansible.builtin.file:
path: "{{ atl_product_home_shared_completed_lock }}"
state: directory
when: copied is succeeded
register: completed_lock_created
- name: Remove moving_lock to show that binary is completed
file:
ansible.builtin.file:
path: "{{ atl_product_home_shared_moving_lock }}"
state: absent
when:
@@ -240,33 +239,33 @@
register: moving_lock_removed
- name: Delete old temp installer
file:
ansible.builtin.file:
path: "{{ atl_product_temp_download }}"
state: absent
when: moving_lock_removed is succeeded
register: temp_deleted
- name: Set install to home_shared location
set_fact:
ansible.builtin.set_fact:
atl_product_download: "{{ atl_product_home_shared_download }}"
when: temp_deleted is succeeded
when: move_binary
when: move_binary | bool
# At this point the binary is in {{ atl_product_download }}
# (which is either on home_shared or temp)
- name: Unpack the downloaded application depending on format
include_tasks: "unpack_{{ atl_download_format }}.yml"
ansible.builtin.include_tasks: "unpack_{{ atl_download_format }}.yml"
- name: Symlink the installed version to current
file:
ansible.builtin.file:
src: "{{ atl_product_installation_versioned }}"
dest: "{{ atl_product_installation_current }}"
state: link
force: true
- name: "Ensure catalina.out log dir exists after product is installed (except bitbucket)"
file:
ansible.builtin.file:
path: "{{ atl_product_installation_current }}/logs"
state: directory
mode: 0750
@@ -276,5 +275,5 @@
changed_when: false # For Molecule idempotence check
- name: Include if jsd is requested to be installed from OBR
include_tasks: "jira-servicedesk_as_obr.yml"
when: atl_install_jsd_as_obr
ansible.builtin.include_tasks: "jira-servicedesk_as_obr.yml"
when: atl_install_jsd_as_obr | bool

View File

@@ -5,9 +5,9 @@
# product that supports the standard marketplace API.
- name: Fetch the latest version from URL
set_fact:
ansible.builtin.set_fact:
atl_product_version_json: "{{ lookup('url', '{{ atl_product_latest_version_url }}') }}"
- name: Set the local var to retrieved version
set_fact:
ansible.builtin.set_fact:
atl_latest_version: "{{ atl_product_version_json._embedded.versions[0].name }}"

View File

@@ -1,7 +1,7 @@
---
- name: Create installer varfile
template:
ansible.builtin.template:
src: "{{ atl_product_family }}.varfile.j2"
dest: "{{ atl_product_varfile }}"
mode: 0755
@@ -13,7 +13,7 @@
# The variable {{ atl_product_download }} will be on temp for first nodes and shared_home for
# subsequent nodes.
- name: Run the installer
command: /bin/sh "{{ atl_product_download }}" -q -varfile "{{ atl_product_varfile }}"
ansible.builtin.command: /bin/sh "{{ atl_product_download }}" -q -varfile "{{ atl_product_varfile }}"
args:
creates: "{{ atl_product_installation_versioned }}/.install4j/"
become: true

View File

@@ -1,7 +1,7 @@
---
- name: Unpack the product packages
unarchive:
ansible.builtin.unarchive:
remote_src: true
src: "{{ atl_product_download }}"
dest: "{{ atl_product_installation_versioned }}"

View File

@@ -24,7 +24,7 @@ atl_stop_exec_path: "{{ atl_product_installation_current }}/{{ atl_stop_script_m
atl_systemd_service_dir_map:
amazon: "/usr/lib/systemd/system"
ubuntu: "/lib/systemd/system"
atl_systemd_service_dir: "{{ atl_systemd_service_dir_map[ansible_distribution|lower] }}"
atl_systemd_service_dir: "{{ atl_systemd_service_dir_map[ansible_distribution | lower] }}"
atl_systemd_service_name: "{{ atl_product_edition }}.service"

View File

@@ -1,19 +1,19 @@
---
- name: Restart Product
service:
ansible.builtin.service:
name: "{{ atl_systemd_service_name }}"
state: restarted
when:
- atl_startup_restart
- atl_startup_restart | bool
- molecule_yml is not defined
no_log: true
- name: Enable Product
service:
ansible.builtin.service:
name: "{{ atl_systemd_service_name }}"
enabled: true
when:
- atl_startup_enable
- atl_startup_enable | bool
- molecule_yml is not defined
no_log: true

View File

@@ -27,7 +27,7 @@
pre_tasks:
- name: Create systemd dir if necessary
file:
ansible.builtin.file:
path: "{{ item }}"
state: directory
with_items:

View File

@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -18,6 +16,8 @@ platforms:
- nofile:262144:262144
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:

View File

@@ -10,7 +10,7 @@
pre_tasks:
- name: Create systemd dir if necessary
file:
ansible.builtin.file:
path: "{{ item }}"
state: directory
with_items:
@@ -21,17 +21,17 @@
- role: product_startup
post_tasks:
- include_vars: ../../defaults/main.yml
- ansible.builtin.include_vars: ../../defaults/main.yml
# workaround Molecule idempotence check
# normal pattern of setting changed_when allows file to be written twice, which takes extra time
- name: Check if vars have already been dumped
stat:
ansible.builtin.stat:
path: "{{ ansible_vars_dump_location }}"
register: ansible_vars_stat_result
- name: Dump vars to file for inspection
copy:
ansible.builtin.copy:
content: |
{{ vars | to_nice_yaml }}
dest: "{{ ansible_vars_dump_location }}"

View File

@@ -1,6 +1,4 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
@@ -18,6 +16,8 @@ platforms:
- nofile:262144:262144
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../"
options:
skip-tags: runtime_pkg
inventory:

View File

@@ -10,17 +10,17 @@
ansible_vars_dump_location: "/tmp/ansible-vars.yml"
tasks:
- include_vars: ../../defaults/main.yml
- ansible.builtin.include_vars: ../../defaults/main.yml
# workaround Molecule idempotence check
# normal pattern of setting changed_when allows file to be written twice, which takes extra time
- name: Check if vars have already been dumped
stat:
ansible.builtin.stat:
path: "{{ ansible_vars_dump_location }}"
register: ansible_vars_stat_result
- name: Dump vars to file for inspection
copy:
ansible.builtin.copy:
content: |
{{ vars | to_nice_yaml }}
dest: "{{ ansible_vars_dump_location }}"

Some files were not shown because too many files have changed in this diff.