Merge remote-tracking branch 'origin/master' into playbook-analytics

This commit is contained in:
Geoff Jacobs
2024-08-19 11:48:43 +10:00
175 changed files with 2877 additions and 1874 deletions

11
.ansible-lint Normal file

@@ -0,0 +1,11 @@
---
kinds:
- playbook: "./aws_*.yml"
offline: true
skip_list:
- ignore-errors
- meta-no-info
- name[casing] # so many of our task names aren't capitalized
- name[missing] # some tasks have no name
- risky-file-permissions
- yaml # many, many warnings


@@ -0,0 +1,4 @@
---
dependency:
enabled: false
prerun: false

1
.gitignore vendored

@@ -8,3 +8,4 @@ __pycache__
.envrc
.idea
.vscode
ansible_collections

36
.pre-commit-config.yaml Normal file

@@ -0,0 +1,36 @@
---
default_install_hook_types: [pre-commit, prepare-commit-msg]
default_stages: [pre-commit]
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0
hooks:
- id: check-executables-have-shebangs
- id: check-merge-conflict
- id: check-yaml
- id: detect-private-key
exclude: '(vault.yml.template|filter_catalina.yaml.j2)'
- id: trailing-whitespace
- repo: https://github.com/avilaton/add-msg-issue-prefix-hook
rev: v0.0.11
hooks:
- id: add-msg-issue-prefix
args:
- --template={}
- --pattern=[a-zA-Z0-9]{1,10}-[0-9]{1,6}
- repo: https://github.com/ansible/ansible-lint.git
rev: v24.2.2
hooks:
- id: ansible-lint
additional_dependencies:
- .
- ansible-core==2.16.6
- jmespath
language_version: python3.11
- repo: https://github.com/IamTheFij/ansible-pre-commit.git
rev: v0.1.2
hooks:
- id: encryption-check
always_run: true
files: vault.ya?ml$


@@ -1,8 +1,7 @@
## Prerequisites
You should have the following software installed:
* Python; 3.x by preference, but 2.7 works.
* You may also need the Python development packages depending on how its installed.
* Python; 3.10 or newer (you may also need the Python development packages depending on how it's installed)
* Python Virtualenv
* Docker
* Cloudtoken
@@ -17,23 +16,24 @@ All other requirements will be installed under Virtualenv.
### Step 1.2: Install development environment dependencies
To ensure compatibility we specify a specific Ansible version; currently 2.7.11
(some older versions had issues with RDS). We do this with
[Pipenv](https://docs.pipenv.org/) to lock the dependency tree. There are 2 main
ways to do this; either directly if packaged, or via pip...
To ensure compatibility we specify a specific Ansible version; currently
ansible-core 2.16.x. We do this with [Pipenv](https://docs.pipenv.org/) to lock
the dependency tree. There are two main ways to do this: either directly if
packaged, or via pip...
# Ubuntu 19.04+, Debian 10+
sudo apt-get install pipenv python-dev
# Ubuntu 22.04+, Debian 12+
sudo apt-get install python3-dev python3-pip pipenv
# Older versions & RHEL/Amazon Linux, etc.
sudo apt-get install -y python-pip python-dev
# Or...
sudo yum install -y python-pip python-dev
pip install pipenv
# Amazon Linux 2023
sudo dnf install python3.11 python3.11-pip python3.11-devel
pip3.11 install pipenv
# Mac via Homebrew
brew install pipenv
brew install libpq openssl@3 python@X.x # where "X.x" is 3.10 or newer
export PATH="/opt/homebrew/opt/libpq/bin:$PATH"
export LDFLAGS="-L/opt/homebrew/opt/openssl@3/lib"
export CPPFLAGS="-I/opt/homebrew/opt/openssl@3/include"
pip3 install pipenv
### Step 1.3: Enter the development environment
@@ -44,11 +44,21 @@ development environment:
pipenv sync --dev
pipenv shell --dev
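As an optional sanity check (assuming the Pipenv environment above is active), confirm the locked ansible-core version is the one in use:
# Should report something like: ansible [core 2.16.x]
pipenv run ansible --version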
### Step 1.4: Run some tests against a role
### Step 1.4: Install Ansible collections
[Molecule](https://molecule.readthedocs.io/en/stable/) is a testing framework for Ansible. We use this to test the
functionality of individual and groups of roles, and to ensure cross-platform
compatibility (currently Amazon Linux 2 and Ubuntu LTS).
To save a little time during deployment, we rely directly on ansible-core and a
custom set of collections as opposed to installing the community edition. To that
end, when testing locally, you'll need these collections installed where Ansible
expects them to be; that path is configured in ansible.cfg and used automatically
when collections are installed via `ansible-galaxy`:
ansible-galaxy collection install --upgrade --verbose --requirements-file requirements.yml
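To verify the collections landed in the repo-local path (run from the repository root so ansible.cfg is picked up; an optional check):
# Lists the collections found under the configured collections_path
ansible-galaxy collection list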
### Step 1.5: Run some tests against a role
[Molecule](https://molecule.readthedocs.io/en/stable/) is a testing framework for
Ansible. We use this to test the functionality of individual and groups of roles,
and to ensure cross-platform compatibility (currently Amazon Linux 2023 and Ubuntu LTS).
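A scenario is run from inside the role's directory; for example (mirroring the CI steps below, with product_common as an arbitrary choice of role):
cd roles/product_common
pipenv run molecule test -s default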
We're going to check that the role that downloads the products works for both
Jira Core and Confluence, on both supported Linux distributions. So run the

22
Pipfile

@@ -4,17 +4,21 @@ verify_ssl = true
name = "pypi"
[packages]
ansible = "==2.10.7"
boto3 = "==1.17.49"
botocore = "==1.20.49"
ansible-core = "==2.16.6"
cryptography = "==42.0.5"
boto3 = "==1.34.92"
botocore = "==1.34.92"
lxml = "==5.2.1"
psycopg2-binary = "==2.9.9"
[dev-packages]
molecule = "==3.2.2"
molecule-docker = "==0.2.4"
docker = "==4.4.1"
taskcat = "*"
molecule = "==24.2.1"
molecule-docker = "==2.1.0"
molecule-plugins = {extras = ["docker"], version = "==23.5.3"}
ansible-compat = "==4.1.11"
docker = "==7.0.0"
pytest = "*"
testinfra = "*"
pytest-testinfra = "*"
[requires]
python_version = "3.7"
python_version = "3"

1856
Pipfile.lock generated

File diff suppressed because it is too large


@@ -61,6 +61,20 @@ them in the `Custom command-line parameters for Ansible` field:
-e atl_product_download_url=http://s3.amazon.com/atlassian/jira-9.0.0-PRE-TEST.tar.gz -e atl_use_system_jdk=true -e atl_download_format=tarball
## Operating system/environment requirements
The roles in this repository currently target:
* Ansible-core 2.16
* Python >= 3.10 (as required by ansible-core 2.16)
* Amazon Linux 2023 and Debian 12+ (including derivatives, i.e., Ubuntu 22.04+) where the system-installable Python meets the above requirement
To use a previous version of this repository and the roles/playbooks within, your application nodes must clone/checkout
a previous commit that supports the desired OS and/or Ansible version. For instance, to continue using Ansible 2.13 on
Amazon Linux 2, use branch "ansible-core-2.13" and/or commit ID `e5af2cf649f72bb5c9d50d0057ddae4a5c99b6f9`. If using one
of the previously-provided AWS CloudFormation templates, you must set the **Deployment Automation Branch** parameter
to "ansible-core-2.13" and/or manually set the stack's "pinned-ansible-sha" SSM Parameter to the referenced commit ID.
### Other customizable parameters
For more deployment customization options, consult the following files for parameters you can


@@ -1,5 +1,4 @@
[defaults]
retry_files_enabled = False
callback_whitelist = profile_tasks
conditional_bare_variables = True
collections_paths = ./
callbacks_enabled = profile_tasks
collections_path = ./


@@ -9,16 +9,16 @@
atl_product_edition: "bitbucket"
atl_product_user: "bitbucket"
atl_product_home: "{{ atl_shared_mountpoint }}/{{ atl_product_edition }}"
atl_use_system_jdk: true
java_major_version: "11" # BB 8 will drop JDK 8 support
java_major_version: "17"
atl_download_format: "tarball"
atl_product_home: "/var/atlassian/application-data/bitbucket"
atl_nfs_mountpoint: "{{ atl_shared_mountpoint }}/bitbucket/shared"
atl_nfs_target: "{{ atl_shared_mountpoint }}/bitbucket/shared"
atl_nfs_version: "3"
atl_startup_systemd_params:
- "UMask=0027"
- "LimitNOFILE=4096"
@@ -35,11 +35,11 @@
- role: aws_common
# For Bitbucket DC clusters that store repos on Bitbucket Mesh (https://confluence.atlassian.com/bitbucketserver/bitbucket-data-center-and-server-8-0-release-notes-1115659343.html#BitbucketDataCenterandServer8.0releasenotes-mesh),
# nodes may be setup to use EFS instead of NFS for shared_home by not defining 'atl_fileserver_host'
- { role: aws_shared_fs_config, when: (atl_fileserver_host is not defined or atl_fileserver_host |length == 0) and (atl_efs_id|length > 0) }
- { role: aws_shared_fs_config, when: (atl_fileserver_host is not defined or atl_fileserver_host | length == 0) and (atl_efs_id | length > 0) }
- { role: nfs_mount, when: (atl_fileserver_host is defined) and (atl_fileserver_host | length > 0) }
- role: product_common
- role: product_install
- role: database_init
- { role: database_init, tags: [database] }
- role: bitbucket_config
- role: product_startup
- role: bitbucket_dataset_restore


@@ -0,0 +1,34 @@
---
- hosts: aws_node_local
become: true
vars:
# See group_vars/aws_node_local.yml, which pull vars from the environment.
atl_product_family: "stash"
atl_product_edition: "mesh"
atl_product_user: "bitbucket"
atl_product_home: "{{ atl_home_base }}/{{ atl_product_edition }}"
atl_systemd_service_name: "mesh.service"
atl_startup_systemd_params:
- 'UMask=0027'
- 'Environment=MESH_HOME={{ atl_home_base }}/{{ atl_product_edition }}'
- 'Environment=JAVA_HOME=/usr/lib/jvm/java'
- 'Environment=JRE_HOME=/usr/lib/jvm/java'
- 'Environment=JMX_REMOTE_AUTH=password'
- 'Environment=JMX_PASSWORD_FILE={{ atl_home_base }}/{{ atl_product_edition }}/jmx.access'
- 'Environment="JVM_SUPPORT_RECOMMENDED_ARGS=-Dmesh.enabled=true -Dplugin.bitbucket-git.mesh.sidecar.child-process=false -Dcom.sun.management.jmxremote.port=4444 -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath={{ atl_home_base }}/{{ atl_product_edition }}/log"'
- 'Environment=JVM_MAXIMUM_MEMORY={{ atl_jvm_heap }}'
- 'PassEnvironment=JMX_REMOTE_AUTH JMX_PASSWORD_FILE JAVA_HOME'
atl_startup_exec_path: "{{ mesh_install_dir }}/current/bin/start-mesh.sh"
atl_stop_exec_path: "{{ mesh_install_dir }}/current/bin/stop-mesh.sh"
atl_systemd_service_target: "multi-user.target"
atl_startup_exec_options: []
roles:
- role: linux_common
- role: aws_common
- role: aws_shared_fs_config
- role: product_common
- role: bitbucket_mesh
- role: product_startup


@@ -20,6 +20,7 @@ set +a
pipenv run \
ansible-playbook -v \
$ATL_DEPLOYMENT_REPOSITORY_CUSTOM_PARAMS \
-e "ansible_python_interpreter=$(pipenv --venv)/bin/python" \
-e "${PLAYBOOK_INVOCATION_EXTRA_PARAMS}" \
-i $INV \
$PLAYBOOK \


@@ -2,34 +2,41 @@
set -e
source /etc/os-release
if [[ $ID = "amzn" ]]; then
yum install -y \
python3-devel \
python3-pip \
python2-boto3 \
python2-botocore \
python-lxml
PIPENV_PYTHON="3"
source /etc/os-release
if [[ $ID = "amzn" ]] && [[ $VERSION = "2" ]]; then
echo "Amazon Linux 2 is no longer supported; see README.md for supported operating systems/environments."
exit 1
elif [[ $ID = "amzn" ]] && [[ $VERSION = "2023" ]]; then
dnf install -y \
python3.11 \
python3.11-pip \
python3.11-devel
echo "Installing pipenv..."
pip3.11 install pipenv
PIPENV_PYTHON="3.11"
else
# FIXME: Currently assumes Debian-based
apt-get update && \
apt-get install -y \
python3-dev \
python3-pip
python3-pip \
pipenv
fi
export PATH=/usr/local/bin:$PATH
export PIP_DEFAULT_TIMEOUT=60
echo "Installing pipenv..."
pip3 install pipenv
echo "Installing ansible and dependencies..."
PIPENV_NOSPIN=1 PIPENV_HIDE_EMOJIS=1 pipenv sync 2>&1 | iconv -c -f utf-8 -t ascii
PIPENV_NOSPIN=1 PIPENV_HIDE_EMOJIS=1 pipenv --python $PIPENV_PYTHON sync 2>&1 | iconv -c -f utf-8 -t ascii
if [[ $1 == "--dev" ]]; then
pipenv sync --dev
fi
echo "Installing collections from galaxy..."
pipenv run ansible-galaxy collection install -v -r requirements.yml
galaxy_retry_count=0
until [[ $galaxy_retry_count -gt 2 ]]; do
pipenv run ansible-galaxy collection install --upgrade --verbose --requirements-file requirements.yml && break
galaxy_retry_count=$((galaxy_retry_count + 1))
done


@@ -4,11 +4,13 @@
#
# make > ../bitbucket-pipelines.yml
image: debian:buster
image: debian:bookworm
options:
size: 2x
definitions:
caches:
ansible-collections: ansible_collections
services:
docker:
memory: 4096
@@ -35,342 +37,532 @@ pipelines:
- parallel:
- step:
name: aws_common/cw-disabled
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/aws_common
- pipenv run molecule test -s cw-disabled
- step:
name: aws_common/default
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/aws_common
- pipenv run molecule test -s default
- step:
name: aws_common/logs-disabled
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/aws_common
- pipenv run molecule test -s logs-disabled
- step:
name: bitbucket_config/default
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/bitbucket_config
- pipenv run molecule test -s default
- step:
name: bitbucket_config/iam_elasticsearch
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/bitbucket_config
- pipenv run molecule test -s iam_elasticsearch
- step:
name: confluence_config/aurora
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/confluence_config
- pipenv run molecule test -s aurora
- step:
name: confluence_config/default
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/confluence_config
- pipenv run molecule test -s default
- step:
name: confluence_config/password_char_escaping
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/confluence_config
- pipenv run molecule test -s password_char_escaping
- step:
name: confluence_config/system_jdk
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/confluence_config
- pipenv run molecule test -s system_jdk
- step:
name: diy_backup/default
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/diy_backup
- pipenv run molecule test -s default
- step:
name: jira_config/aurora
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/jira_config
- pipenv run molecule test -s aurora
- step:
name: jira_config/default
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/jira_config
- pipenv run molecule test -s default
- step:
name: jira_config/jira_config_props
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/jira_config
- pipenv run molecule test -s jira_config_props
- step:
name: jira_config/password_char_escaping
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/jira_config
- pipenv run molecule test -s password_char_escaping
- step:
name: linux_common/default
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/linux_common
- pipenv run molecule test -s default
- step:
name: product_common/default
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_common
- pipenv run molecule test -s default
- step:
name: product_common/system_jdk
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_common
- pipenv run molecule test -s system_jdk
- step:
name: product_install/bitbucket_latest
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s bitbucket_latest
- step:
name: product_install/confluence_latest
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s confluence_latest
- step:
name: product_install/confluence_version_with_uppercase
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s confluence_version_with_uppercase
- step:
name: product_install/crowd_latest
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s crowd_latest
- step:
name: product_install/default
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s default
- step:
name: product_install/jira_all
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_all
- step:
name: product_install/jira_tarball_download_url
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_tarball_download_url
- step:
name: product_install/jira_cached_with_downgrade
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_cached_with_downgrade
- step:
name: product_install/jira_cached_with_upgrade
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_cached_with_upgrade
- step:
name: product_install/jira_software_latest
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_software_latest
- step:
name: product_install/jira_tarball
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_tarball
- step:
name: product_install/jira_version_from_file
name: product_install/jira_tarball_download_url
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_tarball_download_url
- step:
name: product_install/jira_version_from_file
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_version_from_file
- step:
name: product_install/jira_version_latest
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_version_latest
- step:
name: product_install/jira_version_override
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s jira_version_override
- step:
name: product_install/servicedesk3
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s servicedesk3
- step:
name: product_install/servicedesk4
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s servicedesk4
- step:
name: product_install/servicedesk_latest
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_install
- pipenv run molecule test -s servicedesk_latest
- step:
name: product_startup/bitbucket
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_startup
- pipenv run molecule test -s bitbucket
- step:
name: product_startup/default
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_startup
- pipenv run molecule test -s default
- step:
name: product_startup/startup_restart_false
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_startup
- pipenv run molecule test -s startup_restart_false
- step:
name: product_startup/synchrony
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/product_startup
@@ -378,11 +570,16 @@ pipelines:
- step:
name: Run Snyk security scan
caches:
- docker
- pip
- node
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- apt-get update && apt-get install -y npm
- npm install -g snyk@1.455.0
- npm install -g snyk
- snyk auth $SNYK_TOKEN
- pipenv run snyk monitor --severity-threshold=high --project-name=dc-deployments-automation


@@ -38,7 +38,7 @@ atl_installer_temp: "{{ atl_installation_base }}/tmp"
# installed and linked to `/usr/lib/jvm/java`.
# See product_common/task/ubuntu.yml for an example of appropriate
# linking using `alternatives`.
atl_java_home: "{{ '/usr/lib/jvm/java' if atl_use_system_jdk else (atl_product_installation_current + '/jre') }}"
atl_java_home: "{{ '/usr/lib/jvm/java' if atl_use_system_jdk | bool else (atl_product_installation_current + '/jre') }}"
atl_java_binary: "{{ atl_java_home }}/bin/java"
atl_product_logs_default: &logs_default
@@ -79,8 +79,8 @@ atl_aws_region: "{{ lookup('env', 'ATL_AWS_REGION') }}"
atl_aws_iam_role: "{{ lookup('env', 'ATL_AWS_IAM_ROLE') }}"
atl_aws_iam_role_arn: "{{ lookup('env', 'ATL_AWS_IAM_ROLE_ARN') }}"
atl_aws_enable_cloudwatch: "{{ lookup('env', 'ATL_AWS_ENABLE_CLOUDWATCH')|bool or false }}"
atl_aws_enable_cloudwatch_logs: "{{ lookup('env', 'ATL_AWS_ENABLE_CLOUDWATCH_LOGS')|bool or false }}"
atl_aws_enable_cloudwatch: "{{ lookup('env', 'ATL_AWS_ENABLE_CLOUDWATCH') | bool or false }}"
atl_aws_enable_cloudwatch_logs: "{{ lookup('env', 'ATL_AWS_ENABLE_CLOUDWATCH_LOGS') | bool or false }}"
atl_db_engine: "{{ lookup('env', 'ATL_DB_ENGINE') }}"
atl_db_host: "{{ lookup('env', 'ATL_DB_HOST') }}"
@@ -100,13 +100,15 @@ atl_db_timebetweenevictionrunsmillis: "{{ lookup('env', 'ATL_DB_TIMEBETWEENEVICT
atl_db_minevictableidletimemillis: "{{ lookup('env', 'ATL_DB_MINEVICTABLEIDLETIMEMILLIS') or '5000' }}"
atl_db_removeabandoned: "{{ lookup('env', 'ATL_DB_REMOVEABANDONED') or 'true' }}"
atl_db_removeabandonedtimeout: "{{ lookup('env', 'ATL_DB_REMOVEABANDONEDTIMEOUT') or '300' }}"
atl_db_testwhileidle: "{{ lookup('env', 'ATL_DB_TESTWHILEIDLE') or 'true'}}"
atl_db_testwhileidle: "{{ lookup('env', 'ATL_DB_TESTWHILEIDLE') or 'true' }}"
atl_db_testonborrow: "{{ lookup('env', 'ATL_DB_TESTONBORROW') or 'false' }}"
atl_db_engine_to_db_type_map:
aurora_postgres: "postgresaurora96"
rds_postgres: "postgres72"
atl_db_type: "{{ atl_db_engine_to_db_type_map[atl_db_engine] | default('postgres72') }}"
atl_download_secret_name: "{{ lookup('env', 'ATL_DOWNLOAD_SECRET_NAME') or '' }}"
atl_jdbc_db_name: "{{ lookup('env', 'ATL_JDBC_DB_NAME') }}"
atl_jdbc_user: "{{ lookup('env', 'ATL_JDBC_USER') }}"
atl_jdbc_password: "{{ lookup('env', 'ATL_JDBC_PASSWORD') }}"
@@ -116,7 +118,10 @@ atl_jdbc_ctype: "{{ lookup('env', 'ATL_JDBC_CTYPE') or 'en_US.UTF-8' }}"
atl_jdbc_template: "{{ lookup('env', 'ATL_JDBC_TEMPLATE') or 'template1' }}"
atl_jdbc_query_params_for_engine:
aurora_postgres: "?targetServerType=master"
atl_jdbc_url: "jdbc:postgresql://{{ atl_db_host }}:{{ atl_db_port }}/{{ atl_jdbc_db_name }}{{ atl_jdbc_query_params_for_engine[atl_db_engine]| default('') }}"
atl_jdbc_url: "jdbc:postgresql://{{ atl_db_host }}:{{ atl_db_port }}/{{ atl_jdbc_db_name }}{{ atl_jdbc_query_params_for_engine[atl_db_engine] | default('') }}"
atl_secretsmanager_aws_region: "{{ lookup('env', 'ATL_SECRETSMANAGER_AWS_REGION') }}"
atl_secretsmanager_aws_secret_id: "{{ lookup('env', 'ATL_SECRETSMANAGER_AWS_SECRET_ID') }}"
atl_jvm_heap: "{{ lookup('env', 'ATL_JVM_HEAP') or '2048m' }}"
atl_jvm_opts: "{{ lookup('env', 'ATL_JVM_OPTS') or '' }}"


@@ -4,11 +4,13 @@
#
# make > ../bitbucket-pipelines.yml
image: debian:buster
image: debian:bookworm
options:
size: 2x
definitions:
caches:
ansible-collections: ansible_collections
services:
docker:
memory: 4096
@@ -36,9 +38,14 @@ pipelines:
{% for spath in scenario_paths %}
- step:
name: {{ spath.parts[2] }}/{{ spath.parts[4] }}
caches:
- ansible-collections
- docker
- pip
services:
- docker
script:
- apt-get update && apt-get install -y rsync
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- cd roles/{{ spath.parts[2] }}
@@ -47,9 +54,14 @@ pipelines:
- step:
name: Run Snyk security scan
caches:
- docker
- pip
- node
services:
- docker
script:
- export ANSIBLE_CONFIG=./ansible.cfg
- ./bin/install-ansible --dev
- apt-get update && apt-get install -y npm
- npm install -g snyk


@@ -1,4 +1,12 @@
---
collections:
- name: amazon.aws
version: 3.0.0
version: "7.5.0"
- name: ansible.posix
version: "1.5.4"
- name: community.docker
version: "3.9.0"
- name: community.general
version: "8.6.0"
- name: community.postgresql
version: "3.4.0"


@@ -1,15 +1,15 @@
---
- name: Enable CloudWatch Agent
systemd:
ansible.builtin.systemd_service:
name: "amazon-cloudwatch-agent.service"
daemon_reload: true
enabled: true
when: atl_aws_agent_restart
when: atl_aws_agent_restart | bool
- name: Restart CloudWatch Agent
systemd:
ansible.builtin.systemd_service:
name: "amazon-cloudwatch-agent.service"
enabled: true
state: restarted
when: atl_aws_agent_restart
when: atl_aws_agent_restart | bool


@@ -6,8 +6,8 @@ FROM {{ item.registry.url }}/{{ item.image }}
FROM {{ item.image }}
{% endif %}
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python3 sudo bash ca-certificates file && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python3.11 sudo python3.11-devel python*-dnf bash && dnf clean all; \
elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \


@@ -1,17 +1,17 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
- name: amazon_linux2
image: amazonlinux:2
- name: amazon_linux2023
image: amazonlinux:2023
groups:
- aws_node_local
# - name: ubuntu_lts
# image: ubuntu:bionic
# image: ubuntu:jammy
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../../../"
inventory:
links:
group_vars: ../../../../group_vars/


@@ -6,8 +6,8 @@ FROM {{ item.registry.url }}/{{ item.image }}
FROM {{ item.image }}
{% endif %}
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python3 sudo bash ca-certificates file && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python3.11 sudo python3.11-devel python*-dnf bash && dnf clean all; \
elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \


@@ -1,17 +1,17 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
- name: amazon_linux2
image: amazonlinux:2
- name: amazon_linux2023
image: amazonlinux:2023
groups:
- aws_node_local
# - name: ubuntu_lts
# image: ubuntu:bionic
# image: ubuntu:jammy
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../../../"
inventory:
links:
group_vars: ../../../../group_vars/


@@ -6,8 +6,8 @@ FROM {{ item.registry.url }}/{{ item.image }}
FROM {{ item.image }}
{% endif %}
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python3 sudo bash ca-certificates file && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python3.11 sudo python3.11-devel python*-dnf bash && dnf clean all; \
elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \


@@ -1,17 +1,17 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
- name: amazon_linux2
image: amazonlinux:2
- name: amazon_linux2023
image: amazonlinux:2023
groups:
- aws_node_local
# - name: ubuntu_lts
# image: ubuntu:bionic
# image: ubuntu:jammy
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../../../"
inventory:
links:
group_vars: ../../../../group_vars/


@@ -0,0 +1,5 @@
---
- name: Amazon Linux 2 no longer supported
fail:
msg: "Amazon Linux 2 is no longer supported; see README.md for supported operating systems/environments."


@@ -0,0 +1,25 @@
---
- name: Install AWS support packages
ansible.builtin.dnf:
name:
- amazon-efs-utils
- amazon-ssm-agent
- awscli
- git
- ec2-utils
# https://github.com/amazonlinux/amazon-linux-2023/issues/164
- name: Ensure group "aoc" exists
ansible.builtin.group:
name: aoc
state: present
when: atl_aws_enable_cloudwatch is defined and atl_aws_enable_cloudwatch | bool
- name: Install CloudWatch Agent
ansible.builtin.dnf:
name:
- amazon-cloudwatch-agent
when: atl_aws_enable_cloudwatch is defined and atl_aws_enable_cloudwatch | bool
notify:
- Enable CloudWatch Agent


@@ -1,18 +0,0 @@
---
- name: Install AWS support packages
yum:
name:
- amazon-efs-utils
- amazon-ssm-agent
- awscli
- git
- ec2-utils
- name: Install CloudWatch Agent
yum:
name:
- "{{ aws_cloudwatch_agent_rpm }}"
when: atl_aws_enable_cloudwatch is defined and atl_aws_enable_cloudwatch
notify:
- Enable CloudWatch Agent


@@ -1,31 +1,32 @@
---
- name: Fetch local EC2 metadata
ec2_metadata_facts:
amazon.aws.ec2_metadata_facts:
tags:
- notest
- name: Install distro-specific prerequisites
include_tasks: "{{ ansible_distribution|lower }}.yml"
- name: Install Amazon-Linux-specific prerequisites
ansible.builtin.include_tasks: "{{ ansible_distribution | lower }}-{{ ansible_distribution_version }}.yml"
when: ansible_distribution | lower == 'amazon'
- name: Use EC2 instance ID for cluster node ID
set_fact:
ansible.builtin.set_fact:
atl_cluster_node_id: "{{ ansible_ec2_instance_id }}"
atl_local_ipv4: "{{ ansible_ec2_local_ipv4 | default(ansible_default_ipv4.address) }}"
- name: Generate CloudWatch config
template:
ansible.builtin.template:
src: "amazon-cloudwatch-agent.json.j2"
dest: "/opt/aws/amazon-cloudwatch-agent/etc/amazon-cloudwatch-agent.json"
owner: root
group: root
mode: 0644
when: atl_aws_enable_cloudwatch is defined and atl_aws_enable_cloudwatch
when: atl_aws_enable_cloudwatch is defined and atl_aws_enable_cloudwatch | bool
notify:
- Restart CloudWatch Agent
- name: Store some metadata about this run
include_tasks: "write-tags.yml"
ansible.builtin.include_tasks: "write-tags.yml"
- name: Initiate the startup of any new AWS services now
meta: flush_handlers
ansible.builtin.meta: flush_handlers


@@ -1,28 +1,27 @@
---
- name: Retrieve all available EC2 tags
ec2_tag:
amazon.aws.ec2_tag_info:
region: "{{ ansible_ec2_placement_region }}"
resource: "{{ ansible_ec2_instance_id }}"
state: list
register: ec2_instance_tags
ignore_errors: true
tags:
- notest
- name: Retrieve autoscaling group
set_fact:
ansible.builtin.set_fact:
ec2_autoscaling_group: "{{ ec2_instance_tags.tags['aws:autoscaling:groupName'] | default('') }}"
- block:
# No existing timestamp, so this is a first run. Persist some metadata into the ASG.
- name: Fetch the git revision for this repo
command:
- name: Fetch the git revision for this repo # noqa: command-instead-of-module no-changed-when
ansible.builtin.command:
cmd: git rev-parse HEAD
register: git_out
- name: Setup the new ASG tags
set_fact:
ansible.builtin.set_fact:
deployment_firstrun_meta:
- ResourceType: "auto-scaling-group"
ResourceId: "{{ ec2_autoscaling_group }}"
@@ -40,15 +39,15 @@
# Set the tags on the ASG and the local instance. We need to
# ignore errors as it's possible we don't have the permissions,
# and we can't check up-front.
- name: Set the first-run tags on the ASG ("FAIL" is not critical)
command: "aws autoscaling
- name: Set the first-run tags on the ASG ("FAIL" is not critical) # noqa: no-changed-when
ansible.builtin.command: "aws autoscaling
create-or-update-tags
--region {{ ansible_ec2_placement_region }}
--tags '{{ deployment_firstrun_meta | to_json }}'"
ignore_errors: true
- name: Set the tags on the local instance ("FAIL" is not critical)
ec2_tag:
amazon.aws.ec2_tag:
region: "{{ ansible_ec2_placement_region }}"
resource: "{{ ansible_ec2_instance_id }}"
tags:


@@ -4,7 +4,7 @@
"run_as_user": "root"
},
{% if atl_aws_enable_cloudwatch_logs is defined and atl_aws_enable_cloudwatch_logs %}
{% if atl_aws_enable_cloudwatch_logs is defined and atl_aws_enable_cloudwatch_logs | bool %}
"logs": {
"logs_collected": {
"files": {


@@ -1,3 +1,3 @@
---
dependencies:
- aws_common
- role: aws_common


@@ -1,13 +1,13 @@
---
- name: Create mountpoint
file:
ansible.builtin.file:
state: directory
path: "{{ atl_shared_mountpoint }}"
mode: 0755
- name: Enable mountpoint in fstab
mount:
ansible.posix.mount:
path: "{{ atl_shared_mountpoint }}"
src: "{{ efs_target }}:{{ efs_src_dir }}"
fstype: "{{ efs_type }}"


@@ -1,4 +1,7 @@
---
galaxy_info:
namespace: dc_deployments_automation
dependencies:
- bitbucket_common
- role: bitbucket_common


@@ -6,8 +6,8 @@ FROM {{ item.registry.url }}/{{ item.image }}
FROM {{ item.image }}
{% endif %}
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python3 sudo bash ca-certificates file && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python3.11 sudo python3.11-devel python*-dnf bash && dnf clean all; \
elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \


@@ -16,6 +16,7 @@
atl_elasticsearch_password: password
atl_bitbucket_properties_raw: "key1=val1 key2=val2 key3=val3"
skip_shared_home_symlink: true
roles:
- role: linux_common


@@ -1,23 +1,25 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
- name: amazon_linux2
image: amazonlinux:2
- name: amazon_linux2023
image: amazonlinux:2023
groups:
- aws_node_local
platform: linux/amd64
ulimits:
- nofile:262144:262144
- name: ubuntu_lts
image: ubuntu:bionic
image: ubuntu:jammy
groups:
- aws_node_local
platform: linux/amd64
ulimits:
- nofile:262144:262144
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../../../"
options:
skip-tags: runtime_pkg
inventory:


@@ -15,9 +15,9 @@ def test_config_file(host):
assert f.contains("jdbc.user=bb_db_user")
assert f.contains("jdbc.password=molecule_password")
assert f.contains("plugin.search.elasticsearch.username=bitbucket")
assert f.contains("plugin.search.elasticsearch.password=password")
assert not f.contains("plugin.search.elasticsearch.aws.region")
assert f.contains("plugin.search.config.username=bitbucket")
assert f.contains("plugin.search.config.password=password")
assert not f.contains("plugin.search.config.aws.region")
assert f.contains("^key1=val1$")
assert f.contains("^key2=val2$")


@@ -6,8 +6,8 @@ FROM {{ item.registry.url }}/{{ item.image }}
FROM {{ item.image }}
{% endif %}
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python3 sudo bash ca-certificates file && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python3.11 sudo python3.11-devel python*-dnf bash && dnf clean all; \
elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \


@@ -13,7 +13,7 @@
atl_jdbc_password: 'molecule_password'
atl_aws_region: us-east-2
skip_shared_home_symlink: true
roles:
- role: linux_common
- role: product_common


@@ -1,23 +1,25 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
- name: amazon_linux2
image: amazonlinux:2
- name: amazon_linux2023
image: amazonlinux:2023
groups:
- aws_node_local
platform: linux/amd64
ulimits:
- nofile:262144:262144
- name: ubuntu_lts
image: ubuntu:bionic
image: ubuntu:jammy
groups:
- aws_node_local
platform: linux/amd64
ulimits:
- nofile:262144:262144
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../../../"
options:
skip-tags: runtime_pkg
inventory:


@@ -10,6 +10,6 @@ def test_config_file(host):
f = host.file('/media/atl/bitbucket/shared/bitbucket.properties')
assert f.exists
assert not f.contains("plugin.search.elasticsearch.username")
assert not f.contains("plugin.search.elasticsearch.password")
assert f.contains("plugin.search.elasticsearch.aws.region=us-east-2")
assert not f.contains("plugin.search.config.username")
assert not f.contains("plugin.search.config.password")
assert f.contains("plugin.search.config.aws.region=us-east-2")


@@ -1,22 +1,28 @@
---
- name: Create Bitbucket shared dir if necessary
file:
ansible.builtin.file:
path: "{{ atl_product_home_shared }}"
owner: "{{ atl_product_user }}"
group: "{{ atl_product_user }}"
mode: 0750
state: directory
- name: Check if Bitbucket config file exists
ansible.builtin.stat:
path: "{{ atl_product_home_shared }}/bitbucket.properties"
register: bitbucket_config
- name: Create Bitbucket config file
template:
ansible.builtin.template:
src: bitbucket.properties.j2
dest: "{{ atl_product_home_shared }}/bitbucket.properties"
owner: "{{ atl_product_user }}"
group: "{{ atl_product_user }}"
when: not bitbucket_config.stat.exists
- name: Remove write permissions from installation directory
file:
ansible.builtin.file:
path: "{{ atl_product_installation_versioned }}"
owner: "root"
group: "root"
@@ -25,7 +31,7 @@
changed_when: false # For Molecule idempotence check
- name: Create conf/Catalina directory so catalina.out logging works
file:
ansible.builtin.file:
path: "{{ atl_product_installation_versioned }}/conf/Catalina"
state: directory
mode: "u=rwX,g=rX,o-rwx"


@@ -12,15 +12,15 @@ hazelcast.network.aws.region={{ atl_aws_region }}
hazelcast.network.aws.tag.value={{ atl_aws_stack_name }}
hazelcast.group.name={{ atl_aws_stack_name }}
hazelcast.group.password={{ atl_aws_stack_name }}
plugin.search.elasticsearch.baseurl={{ atl_elasticsearch_endpoint }}
{% if elasticsearch_should_auth_with_iam %}
plugin.search.elasticsearch.aws.region={{ atl_aws_region }}
plugin.search.config.baseurl={{ atl_elasticsearch_endpoint }}
{% if elasticsearch_should_auth_with_iam | bool %}
plugin.search.config.aws.region={{ atl_aws_region }}
{% else %}
plugin.search.elasticsearch.username={{ atl_elasticsearch_username }}
plugin.search.elasticsearch.password={{ atl_elasticsearch_password }}
plugin.search.config.username={{ atl_elasticsearch_username }}
plugin.search.config.password={{ atl_elasticsearch_password }}
{% endif %}
setup.displayName=Bitbucket
setup.baseUrl = {{ atl_bitbucket_baseurl }}
setup.baseUrl={{ atl_bitbucket_baseurl }}
setup.license={{ atl_bitbucket_license_key }}
setup.sysadmin.username=admin
setup.sysadmin.password={{ atl_bitbucket_admin_password }}


@@ -1,4 +1,4 @@
---
dependencies:
- bitbucket_common
- role: bitbucket_common


@@ -1,24 +1,24 @@
---
- name: Force all notified handlers to run at this point, not waiting for normal sync points
meta: flush_handlers
ansible.builtin.meta: flush_handlers
- name: wait for port 7990 to be up
wait_for:
ansible.builtin.wait_for:
port: 7990
delay: 60
- name: wait for path to become available
wait_for:
ansible.builtin.wait_for:
path: "{{ atl_product_home_shared }}/data/migration/import"
delay: 60
- name: Copy Bitbucket dataset from s3
get_url:
ansible.builtin.get_url:
url: "{{ atl_bitbucket_dataset_url }}"
dest: "{{ atl_product_home_shared }}/data/migration/import"
- name: Invoke Import API
uri:
ansible.builtin.uri:
url: "{{ atl_bitbucket_baseurl }}/rest/api/1.0/migration/imports"
user: admin
password: "{{ atl_bitbucket_admin_password }}"
@@ -36,7 +36,7 @@
failed_when: output is defined and output.json is defined and output.json.state != 'INITIALISING'
- name: get import status
uri:
ansible.builtin.uri:
url: "{{ atl_bitbucket_baseurl }}/rest/api/1.0/migration/imports/{{ output.json.id }}"
user: admin
password: "{{ atl_bitbucket_admin_password }}"
@@ -50,7 +50,7 @@
delay: 10
- name: create lock file
file:
ansible.builtin.file:
path: "{{ atl_product_home_shared }}/data/migration/import/lock.file"
state: touch
when: import_status.json.state == 'COMPLETED'


@@ -0,0 +1,7 @@
mesh_install_dir: /opt/atlassian/mesh
bitbucket_mesh_maven_repo: https://packages.atlassian.com/maven-external
bitbucket_mesh_version: "1.3.1"
# if basic_auth is required for download of atlassian installable artifact, provide the name of an AWS Secrets Manager secret
# with values for both password and username
atl_download_secret_name: ''


@@ -0,0 +1,19 @@
---
- name: Restart Product
ansible.builtin.service:
name: "{{ atl_systemd_service_name }}"
state: restarted
when:
- atl_startup_restart
- molecule_yml is not defined
no_log: true
- name: Enable Product
ansible.builtin.service:
name: "{{ atl_systemd_service_name }}"
enabled: true
when:
- atl_startup_enable
- molecule_yml is not defined
no_log: true


@@ -0,0 +1,88 @@
---
- name: Create Bitbucket dirs if necessary
ansible.builtin.file:
path: "{{ item }}"
owner: "{{ atl_product_user_uid }}"
group: "{{ atl_product_user_uid }}"
mode: 0750
state: directory
recurse: no
with_items:
- "{{ atl_home_base }}/{{ atl_product_edition }}"
- "{{ atl_home_base }}/{{ atl_product_user }}"
- "{{ mesh_install_dir }}"
# optionally grab basic_auth creds from secrets_manager secret called 'download_atlassian'
- name: set basic_auth facts if the secret exists
ansible.builtin.set_fact:
download_atlassian_password: "{{ lookup('amazon.aws.aws_secret', atl_download_secret_name + '.password', region=ansible_ec2_placement_region, bypath=false, nested=true, on_denied='skip', on_missing='skip') }}"
download_atlassian_username: "{{ lookup('amazon.aws.aws_secret', atl_download_secret_name + '.username', region=ansible_ec2_placement_region, bypath=false, nested=true, on_denied='skip', on_missing='skip') }}"
failed_when: false
ignore_errors: yes
no_log: true
when:
- ansible_ec2_placement_region is defined
- atl_download_secret_name is defined
tags:
- runtime_pkg
# Fetch binary and copy to temp
# optionally use basic_auth creds from secrets_manager
- name: Fetch binary
ansible.builtin.get_url:
url: "{{ atl_product_download_url }}"
dest: "{{ mesh_install_dir }}"
url_password: "{{ download_atlassian_password | default(omit) }}"
url_username: "{{ download_atlassian_username | default(omit) }}"
owner: "{{ atl_product_user }}"
group: "{{ atl_product_user }}"
mode: 0644
force: false
register: maven_download
- name: extract the downloaded artifact
ansible.builtin.unarchive:
src: "{{ maven_download.dest }}"
dest: "/opt/atlassian/mesh/"
creates: "/opt/atlassian/mesh/atlassian-bitbucket-mesh-{{ atl_product_version }}"
mode: 0755
owner: "{{ atl_product_user }}"
group: "{{ atl_product_user }}"
register: mesh_extract
when:
- maven_download.changed | bool
# the owner/group on the unarchive above isn't thorough
- name: adjust permissions on the extracted directory
ansible.builtin.file:
state: directory
path: "/opt/atlassian/mesh/atlassian-bitbucket-mesh-{{ atl_product_version }}"
owner: "{{ atl_product_user }}"
group: "{{ atl_product_user }}"
recurse: yes
- name: symlink to the current version
ansible.builtin.file:
src: "/opt/atlassian/mesh/atlassian-bitbucket-mesh-{{ atl_product_version }}"
dest: "/opt/atlassian/mesh/current"
state: link
when:
- mesh_extract.changed | bool
- name: touch the jmx password file
ansible.builtin.file:
path: "{{ atl_home_base }}/{{ atl_product_edition }}/jmx.access"
state: touch
owner: "{{ atl_product_user_uid }}"
group: "{{ atl_product_user_uid }}"
mode: 0600
# - name: template out mesh.properties
# ansible.builtin.template:
# src: mesh.properties.j2
# dest: "{{ atl_home_base }}/{{ atl_product_edition }}/mesh.properties"
# owner: "{{ atl_product_user }}"
# group: "{{ atl_product_user }}"
# mode: 0600


@@ -0,0 +1,10 @@
# Listen for gRPC requests on all interfaces by default. This allows connecting to the node remotely
grpc.server.address=0.0.0.0
authentication.token={{ ansible_hostname | hash('md5') }}
node.name={{ ansible_hostname }}
node.id={{ ansible_hostname }}
jmx.enabled={{ mesh_jmx_enabled | default(false) }}
management.metrics.export.jmx.domain={{ mesh_jmx_export_domain | default("") }}
metrics.tags.host={{ ansible_hostname }}


@@ -19,6 +19,10 @@ atl_hazelcast_network_aws_iam_region: "{{ lookup('env', 'ATL_HAZELCAST_NETWORK_A
atl_hazelcast_network_aws_iam_role: "{{ lookup('env', 'ATL_HAZELCAST_NETWORK_AWS_IAM_ROLE') }}"
atl_hazelcast_network_aws_tag_value: "{{ lookup('env', 'ATL_HAZELCAST_NETWORK_AWS_TAG_VALUE') }}"
atl_opensearch_endpoint: "{{ lookup('env', 'ATL_OPENSEARCH_ENDPOINT') }}"
atl_opensearch_password: "{{ lookup('env', 'ATL_OPENSEARCH_PASSWORD') }}"
atl_opensearch_user: "{{ lookup('env', 'ATL_OPENSEARCH_USER') }}"
atl_catalina_opts: ""
atl_catalina_opts_extra: >-
-Datlassian.event.thread_pool_configuration.queue_size=4096
@@ -33,7 +37,7 @@ atl_catalina_opts_extra: >-
-Dsynchrony.proxy.enabled=false
-Dconfluence.cluster.node.name={{ atl_local_ipv4 }}
-Dconfluence.cluster.hazelcast.max.no.heartbeat.seconds=60
{% if atl_synchrony_service_url|string|length %}-Dsynchrony.service.url={{ atl_synchrony_service_url }}{% endif %}
{% if atl_synchrony_service_url | string | length %}-Dsynchrony.service.url={{ atl_synchrony_service_url }}{% endif %}
atl_tomcat_port: "8080"
atl_tomcat_mgmt_port: "8005"


@@ -6,8 +6,8 @@ FROM {{ item.registry.url }}/{{ item.image }}
FROM {{ item.image }}
{% endif %}
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python3 sudo bash ca-certificates file && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python3.11 sudo python3.11-devel python*-dnf bash && dnf clean all; \
elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \


@@ -1,19 +1,21 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
- name: amazon_linux2
image: amazonlinux:2
- name: amazon_linux2023
image: amazonlinux:2023
groups:
- aws_node_local
platform: linux/amd64
- name: ubuntu_lts
image: ubuntu:bionic
image: ubuntu:jammy
groups:
- aws_node_local
platform: linux/amd64
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../../../"
options:
skip-tags: runtime_pkg
inventory:


@@ -6,8 +6,8 @@ FROM {{ item.registry.url }}/{{ item.image }}
FROM {{ item.image }}
{% endif %}
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python3 sudo bash ca-certificates file && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python3.11 sudo python3.11-devel python*-dnf bash && dnf clean all; \
elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \


@@ -16,7 +16,7 @@
atl_autologin_cookie_age: "COOKIEAGE"
atl_local_ipv4: "1.1.1.1"
atl_tomcat_scheme: "http"
atl_tomcat_contextpath: "foo"
atl_tomcat_contextpath: "/foo"
atl_proxy_name: "localhost"
atl_proxy_port: "80"
atl_db_preferredtestquery: "select 1;"


@@ -1,19 +1,21 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
- name: amazon_linux2
image: amazonlinux:2
- name: amazon_linux2023
image: amazonlinux:2023
groups:
- aws_node_local
platform: linux/amd64
- name: ubuntu_lts
image: ubuntu:bionic
image: ubuntu:jammy
groups:
- aws_node_local
platform: linux/amd64
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../../../"
options:
skip-tags: runtime_pkg
inventory:


@@ -38,7 +38,7 @@ def test_server_file(host):
assert f.exists
assert f.contains('Connector port="8080"')
assert f.contains('Server port="8005"')
assert f.contains('<Context path="foo"')
assert f.contains('<Context path="/foo"')
assert f.contains('maxThreads="200"')
assert f.contains('minSpareThreads="10"')
assert f.contains('connectionTimeout="20000"')
@@ -78,7 +78,7 @@ def test_confluence_config_file(host):
assert f.contains('<property name="confluence.cluster.aws.host.header">ec2.amazonaws.com</property>')
assert f.contains('<property name="hibernate.connection.url">jdbc:postgresql://postgres-db.ap-southeast-2.rds.amazonaws.com:5432/confluence</property>')
assert f.contains('<property name="hibernate.connection.password">molecule_password</property>')
assert f.contains('<property name="hibernate.c3p0.preferredTestQuery">select 1;</property>')
assert (f.contains('<property name="hibernate.c3p0.preferredTestQuery">select 1;</property>') or f.contains('<property name="hibernate.hikari.registerMbeans">true</property>'))
assert f.contains('<property name="confluence.webapp.context.path">/foo</property>')
assert f.contains('<property name="confluence.cluster.aws.tag.key">my-cluster-tag</property>')


@@ -6,8 +6,8 @@ FROM {{ item.registry.url }}/{{ item.image }}
FROM {{ item.image }}
{% endif %}
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python3 sudo bash ca-certificates file && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python3.11 sudo python3.11-devel python*-dnf bash && dnf clean all; \
elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \


@@ -1,20 +1,22 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
- name: amazon_linux2
image: amazonlinux:2
- name: amazon_linux2023
image: amazonlinux:2023
groups:
- aws_node_local
platform: linux/amd64
- name: ubuntu_lts
image: ubuntu:bionic
image: ubuntu:jammy
groups:
- aws_node_local
platform: linux/amd64
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../../../"
options:
skip-tags: runtime_pkg
inventory:


@@ -6,8 +6,8 @@ FROM {{ item.registry.url }}/{{ item.image }}
FROM {{ item.image }}
{% endif %}
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python3 sudo bash ca-certificates file && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python3.11 sudo python3.11-devel python*-dnf bash && dnf clean all; \
elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \


@@ -1,19 +1,21 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
- name: amazon_linux2
image: amazonlinux:2
- name: amazon_linux2023
image: amazonlinux:2023
groups:
- aws_node_local
platform: linux/amd64
- name: ubuntu_lts
image: ubuntu:bionic
image: ubuntu:jammy
groups:
- aws_node_local
platform: linux/amd64
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../../../"
options:
skip-tags: runtime_pkg
inventory:


@@ -1,15 +1,14 @@
---
- name: Install Google Noto fonts for language coverage
yum:
ansible.builtin.dnf:
name:
- "google-noto-*"
- name: Link the language fonts into the JDK
# Not idiomatic, but cleaner than messing with nested lookups...
shell:
ansible.builtin.shell:
cmd: "ln -sf /usr/share/fonts/google-noto*/* {{ item }}/"
creates: "{{ item }}/NotoSansJavanese-Regular.ttf"
warn: false
with_items: "{{ atl_fonts_fallback_dirs }}"
changed_when: false # For Molecule idempotence check


@@ -0,0 +1,5 @@
---
- name: Amazon Linux 2 no longer supported
fail:
msg: "Amazon Linux 2 is no longer supported; see README.md for supported operating systems/environments."


@@ -1,7 +1,7 @@
---
- name: Create application directories
file:
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: 0750
@@ -17,28 +17,25 @@
# Create symlink to force single (unclustered) Confluence to store
# shared-data and attachments in the shared drive.
- name: Symlink local attachments to shared storage
file:
src: "{{ item.0 }}"
dest: "{{ item.1 }}"
ansible.builtin.file:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
force: false
state: link
mode: 0750
owner: "{{ atl_product_user }}"
group: "{{ atl_product_user }}"
vars:
- links:
- ["{{ atl_product_home_shared }}/", "{{ atl_product_home }}/shared-home"]
- ["{{ atl_product_home_shared }}/attachments/", "{{ atl_product_home }}/attachments"]
with_nested:
- "{{ links }}"
loop:
- {src: "{{ atl_product_home_shared }}/", dest: "{{ atl_product_home }}/shared-home"}
- {src: "{{ atl_product_home_shared }}/attachments/", dest: "{{ atl_product_home }}/attachments"}
- name: Create Tomcat server config
template:
ansible.builtin.template:
src: server.xml.j2
dest: "{{ atl_product_installation_versioned }}/conf/server.xml"
- name: Override JVM memory settings.
replace:
ansible.builtin.replace:
path: "{{ atl_product_installation_versioned }}/bin/setenv.sh"
regexp: "-{{ item }}\\d+m "
replace: "-{{ item }}{{ atl_jvm_heap }} "
@@ -47,43 +44,65 @@
- 'Xms'
- name: Set the Tomcat environment
lineinfile:
ansible.builtin.lineinfile:
path: "{{ atl_product_installation_versioned }}/bin/setenv.sh"
insertafter: "EOF"
line: 'export CATALINA_OPTS="${CATALINA_OPTS} {{ atl_catalina_opts }} {{ atl_catalina_opts_extra }}"'
- name: Configure login properties
template:
ansible.builtin.template:
src: seraph-config.xml.j2
dest: "{{ atl_product_installation_versioned }}/confluence/WEB-INF/classes/seraph-config.xml"
- name: Configure Confluence home directory
template:
ansible.builtin.template:
src: confluence-init.properties.j2
dest: "{{ atl_product_installation_versioned }}/confluence/WEB-INF/classes/confluence-init.properties"
- name: Create Confluence configuration
template:
ansible.builtin.template:
src: confluence.cfg.xml.j2
dest: "{{ atl_product_home }}/confluence.cfg.xml"
owner: "{{ atl_product_user }}"
group: "{{ atl_product_user }}"
- name: Limit permissions on the installation directory
file:
path: "{{ atl_product_installation_versioned }}"
- name: Limit permissions on the installer temp directory, recursively
ansible.builtin.file:
path: "{{ atl_installer_temp }}"
owner: "root"
group: "root"
mode: "u=rwX,g=rX,o=rX"
recurse: true
with_items:
- "{{ atl_installer_temp }}"
- "{{ atl_product_installation_versioned }}"
- "{{ atl_product_version_cache_dir }}"
changed_when: false # For Molecule idempotence check
- name: Limit permissions on the installation directory, non-recursively
ansible.builtin.file:
path: "{{ atl_product_installation_versioned }}"
owner: "root"
group: "root"
mode: "u=rwX,g=rX,o=rX"
changed_when: false # For Molecule idempotence check
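# Tighten ownership across the install tree while leaving the runtime directories (logs/temp/work) writable by the product user.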
- name: Find top-level files/directories in installation directory, excluding working directories
ansible.builtin.find:
paths: "{{ atl_product_installation_versioned }}"
depth: 1
file_type: any
excludes: logs,temp,work
register: atl_product_installation_versioned_file_list
- name: Limit permissions on files and directories in the installation directory, recursively, excluding working directories
ansible.builtin.file:
path: "{{ item.path }}"
owner: "root"
group: "root"
mode: "u=rwX,g=rX,o=rX"
recurse: "{{ item.isdir }}"
loop: "{{ atl_product_installation_versioned_file_list.files }}"
changed_when: false # For Molecule idempotence check
- name: Grant access to the product working directories
file:
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: "u=rwX,g=rX,o-rwx"
@@ -97,7 +116,7 @@
changed_when: false # For Molecule idempotence check
- name: Create conf/Catalina directory owned by product so catalina.out logging works
file:
ansible.builtin.file:
path: "{{ atl_product_installation_versioned }}/conf/Catalina"
state: directory
mode: "u=rwX,g=rX,o-rwx"
@@ -106,7 +125,7 @@
changed_when: false # For Molecule idempotence check
- name: Assert baseurl to same as atl_proxy_name
postgresql_query:
community.postgresql.postgresql_query:
login_host: "{{ atl_db_host }}"
login_user: "{{ atl_jdbc_user }}"
login_password: "{{ atl_jdbc_password }}"
@@ -126,11 +145,16 @@
ignore_errors: yes # For Molecule as it has no db test framework included
- name: Create JVM font fallback directories
file:
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: 0755
with_items: "{{ atl_fonts_fallback_dirs }}"
- name: Install & configure distro language fonts
include_tasks: "{{ ansible_distribution|lower }}_fonts.yml"
- name: Install & configure Amazon-Linux-specific language fonts
ansible.builtin.include_tasks: "{{ ansible_distribution | lower }}-{{ ansible_distribution_version }}_fonts.yml"
when: ansible_distribution | lower == 'amazon'
- name: Install & configure Ubuntu/Debian-specific language fonts
ansible.builtin.include_tasks: "{{ ansible_distribution | lower }}_fonts.yml"
when: ansible_distribution | lower != 'amazon'


@@ -1,15 +1,14 @@
---
- name: Install Google Noto fonts for language coverage
package:
ansible.builtin.package:
name:
- "fonts-noto"
- name: Link the language fonts into the JDK
# Not idiomatic, but cleaner than messing with nested lookups...
shell:
ansible.builtin.shell:
cmd: "ln -sf /usr/share/fonts/truetype/noto/* {{ item }}/"
creates: "{{ item }}/NotoSansJavanese-Regular.ttf"
warn: false
with_items: "{{ atl_fonts_fallback_dirs }}"
changed_when: false # For Molecule idempotence check


@@ -10,13 +10,23 @@
<property name="confluence.database.choice">postgresql</property>
<property name="confluence.database.connection.type">database-type-standard</property>
<property name="hibernate.dialect">com.atlassian.confluence.impl.hibernate.dialect.PostgreSQLDialect</property>
<property name="struts.multipart.saveDir">${localHome}/temp</property>
<property name="webwork.multipart.saveDir">${localHome}/temp</property>
<property name="attachments.dir">${confluenceHome}/attachments</property>
<property name="hibernate.connection.driver_class">{{ atl_db_driver }}</property>
<property name="hibernate.connection.url">{{ atl_jdbc_url }}</property>
<property name="hibernate.connection.username">{{ atl_jdbc_user }}</property>
{% if atl_product_version.split(".")[:2] | join(".") is version('8.6', '>=') and atl_secretsmanager_aws_region is defined and atl_secretsmanager_aws_region != "" and atl_secretsmanager_aws_secret_id is defined and atl_secretsmanager_aws_secret_id != "" -%}
<property name="jdbc.password.decrypter.classname">com.atlassian.secrets.store.aws.AwsSecretsManagerStore</property>
<property name="hibernate.connection.password">{"region": "{{ atl_secretsmanager_aws_region }}", "secretId": "{{ atl_secretsmanager_aws_secret_id }}"}</property>
{% else -%}
<property name="hibernate.connection.password">{{ atl_jdbc_password | replace("&", "&amp;") }}</property>
{% endif -%}
{# c3p0 pool settings below apply to Confluence 7.13 and older; newer versions use HikariCP #}
{%- if (atl_product_version.split(".")[0] | int() == 7 and atl_product_version.split(".")[1] | int() <= 13) or atl_product_version.split(".")[0] | int() < 7 -%}
<property name="hibernate.c3p0.min_size">{{ atl_db_poolminsize }}</property>
<property name="hibernate.c3p0.max_size">{{ atl_db_poolmaxsize }}</property>
<property name="hibernate.c3p0.timeout">{{ atl_db_timeout }}</property>
@@ -25,6 +35,13 @@
<property name="hibernate.c3p0.validate">{{ atl_db_validate }}</property>
<property name="hibernate.c3p0.acquire_increment">{{ atl_db_acquireincrement }}</property>
<property name="hibernate.c3p0.preferredTestQuery">{{ atl_db_preferredtestquery }}</property>
{%- else -%}
<property name="hibernate.hikari.idleTimeout">{{ atl_db_timeout }}</property>
<property name="hibernate.hikari.maximumPoolSize">{{ atl_db_poolmaxsize }}</property>
<property name="hibernate.hikari.minimumIdle">{{ atl_db_poolminsize }}</property>
<property name="hibernate.hikari.registerMbeans">true</property>
<property name="hibernate.connection.provider_class">{{ atl_db_provider_class | default('com.atlassian.confluence.impl.hibernate.DelegatingHikariConnectionProvider') }}</property>
{% endif %}
<property name="shared-home">{{ atl_product_home_shared }}</property>
<property name="confluence.cluster">true</property>
@@ -37,9 +54,24 @@
<property name="confluence.cluster.join.type">aws</property>
<property name="confluence.cluster.name">{{ atl_aws_stack_name }}</property>
<property name="confluence.cluster.ttl">1</property>
{% if atl_tomcat_contextpath is defined and atl_tomcat_contextpath != '' %}
<property name="confluence.webapp.context.path">/{{ atl_tomcat_contextpath }}</property>
{%- if atl_tomcat_contextpath is defined and atl_tomcat_contextpath != '' -%}
<property name="confluence.webapp.context.path">{{ atl_tomcat_contextpath }}</property>
{% endif %}
{# config specific to opensearch #}
{%- if atl_opensearch_endpoint is defined and atl_opensearch_endpoint != '' %}
{# if password provided set password and user but NOT region #}
{% if atl_opensearch_password is defined and atl_opensearch_password != '' %}
<property name="opensearch.password">{{ atl_opensearch_password }}</property>
<property name="opensearch.username">{{ atl_opensearch_user }}</property>
{%- else %}
<property name="opensearch.aws.region">{{ atl_hazelcast_network_aws_iam_region }}</property>
{% endif %}
<property name="opensearch.http.url">https://{{ atl_opensearch_endpoint }}</property>
<property name="search.platform">opensearch</property>
{%- endif %}
</properties>
</confluence-configuration>


@@ -22,13 +22,10 @@
acceptCount="{{ atl_tomcat_acceptcount }}"
secure="{{ atl_tomcat_secure }}"
scheme="{{ atl_tomcat_scheme }}"
{% if atl_proxy_name is defined and atl_proxy_name != '' %}
proxyName="{{ atl_proxy_name }}"
{% endif %}
{% if atl_proxy_port is defined and atl_proxy_port != '' %}
proxyPort="{{ atl_proxy_port }}"
{% endif %}
{% if atl_proxy_name is defined and atl_proxy_name != '' -%}proxyName="{{ atl_proxy_name }}"
{% endif -%}
{% if atl_proxy_port is defined and atl_proxy_port != '' -%}proxyPort="{{ atl_proxy_port }}"
{% endif -%}
relaxedPathChars="[]|"
relaxedQueryChars="[]|{}^\`&quot;&lt;&gt;"
bindOnInit="false"
@@ -40,7 +37,6 @@
<Connector port="{{ atl_tomcat_redirectport }}"
protocol="{{ atl_tomcat_protocol }}"
connectionTimeout="{{ atl_tomcat_connectiontimeout }}"
relaxedPathChars="[]|"
relaxedQueryChars="[]|{}^\`&quot;&lt;&gt;"
maxHttpHeaderSize="65536"
@@ -50,6 +46,15 @@
compressableMimeType="text/html,text/xml,text/plain,text/css,application/json,application/javascript,application/x-javascript" />
{% endif %}
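{# Optional extra connector, only rendered when an app-tunnel port is configured #}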
{% if atl_apptunnel_port is defined and atl_apptunnel_port != '' %}
<Connector port="{{ atl_apptunnel_port }}"
connectionTimeout="20000"
maxThreads="200"
minSpareThreads="10"
enableLookups="false"
acceptCount="10"
URIEncoding="UTF-8"/>
{% endif %}
<Engine name="Standalone"
defaultHost="localhost"


@@ -1,60 +1,60 @@
---
- name: Create server config
template:
ansible.builtin.template:
src: server.xml.j2
dest: "{{ atl_product_installation_versioned }}/apache-tomcat/conf/server.xml"
- name: Set the minimum heap size (Xms)
lineinfile:
ansible.builtin.lineinfile:
path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh"
regexp: '^(.*)Xms(\d+\w)(\s.*)$'
line: '\1Xms{{ atl_jvm_heap }}\3'
backrefs: yes
- name: Set the maximum heap size (Xmx)
lineinfile:
ansible.builtin.lineinfile:
path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh"
regexp: '^(.*)Xmx(\d+\w)(\s.*)$'
line: '\1Xmx{{ atl_jvm_heap }}\3'
backrefs: yes
- name: Set Crowd home directory in crowd-init.properties file
lineinfile:
- name: Set Crowd home directory in crowd-init.properties file
ansible.builtin.lineinfile:
path: "{{ atl_product_installation_versioned }}/crowd-webapp/WEB-INF/classes/crowd-init.properties"
line: 'crowd.home={{ atl_product_home }}'
- name: Export CATALINA_OPTS in setenv.sh
lineinfile:
ansible.builtin.lineinfile:
path: '{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh'
line: 'export CATALINA_OPTS'
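# Normalise CATALINA_OPTS: split each opts string into individual flags, then flatten, de-duplicate and sort them below so setenv.sh stays stable across runs.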
- name: CATALINA_OPTS to list
set_fact:
catalina_ops_list: "{{ catalina_ops_list|default([]) }} + {{ (item | trim | regex_replace('^-')).split(' -') }}"
ansible.builtin.set_fact:
catalina_ops_list: "{{ catalina_ops_list | default([]) + (item | trim | regex_replace('^-')).split(' -') }}"
loop:
- '{{ atl_catalina_opts }}'
- '{{ atl_catalina_opts_extra }}'
- name: CATALINA_OPTS unique and sorted
set_fact:
ansible.builtin.set_fact:
catalina_opts: "{{ query('flattened', catalina_ops_list) | reject('equalto', '') | unique | sort }}"
- name: Set CATALINA_OPTS in setenv.sh
blockinfile:
ansible.builtin.blockinfile:
block: "{{ lookup('template', 'templates/catalina_opts.j2') }}"
path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh"
insertbefore: "^export CATALINA_OPTS$"
marker: "# {mark} ANSIBLE MANAGED CATALINA_OPTS"
- name: Set JAVA_HOME
lineinfile:
ansible.builtin.lineinfile:
path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh"
insertafter: "EOF"
line: "export JAVA_HOME={{ atl_java_home }}"
- name: Create application directories
file:
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: 0750
@@ -66,21 +66,51 @@
- "{{ atl_product_shared_plugins }}"
changed_when: false # For Molecule idempotence check
- name: Limit permissions on the installation directory
file:
path: "{{ atl_product_installation_versioned }}"
owner: "{{ atl_product_user }}"
group: "{{ atl_product_user }}"
- name: Limit permissions on the installer temp directory, recursively
ansible.builtin.file:
path: "{{ atl_installer_temp }}"
owner: "root"
group: "root"
mode: "u=rwX,g=rX,o=rX"
recurse: true
with_items:
- "{{ atl_installer_temp }}"
- "{{ atl_product_installation_versioned }}"
- "{{ atl_product_version_cache_dir }}"
changed_when: false # For Molecule idempotence check
- name: Limit permissions on the installation directory, non-recursively
ansible.builtin.file:
path: "{{ atl_product_installation_versioned }}"
owner: "root"
group: "root"
mode: "u=rwX,g=rX,o=rX"
changed_when: false # For Molecule idempotence check
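# Crowd nests Tomcat under apache-tomcat/, so it is scanned separately and its logs/temp/work directories stay writable.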
- name: Find top-level files/directories in installation directory, excluding tomcat
ansible.builtin.find:
paths: "{{ atl_product_installation_versioned }}"
depth: 1
file_type: any
excludes: apache-tomcat
register: atl_product_installation_versioned_file_list
- name: Find top-level files/directories in tomcat directory, excluding working directories
ansible.builtin.find:
paths: "{{ atl_product_installation_versioned }}/apache-tomcat"
depth: 1
file_type: any
excludes: logs,temp,work
register: atl_product_installation_versioned_tomcat_file_list
- name: Limit permissions on files and directories in the installation and tomcat directories, recursively, excluding working directories
ansible.builtin.file:
path: "{{ item.path }}"
owner: "root"
group: "root"
mode: "u=rwX,g=rX,o=rX"
recurse: "{{ item.isdir }}"
loop: "{{ atl_product_installation_versioned_file_list.files + atl_product_installation_versioned_tomcat_file_list.files }}"
changed_when: false # For Molecule idempotence check
- name: Grant access to the product working directories
file:
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: "u=rwX,g=rX,o-rwx"
@@ -94,7 +124,7 @@
changed_when: false # For Molecule idempotence check
- name: Create conf/Catalina directory owned by product so catalina.out logging works
file:
ansible.builtin.file:
path: "{{ atl_product_installation_versioned }}/conf/Catalina"
state: directory
mode: "u=rwX,g=rX,o-rwx"
@@ -103,7 +133,7 @@
changed_when: false # For Molecule idempotence check
- name: Symlink Crowd shared home directory
file:
ansible.builtin.file:
src: "{{ atl_product_home_shared }}"
dest: "{{ atl_product_home }}/shared"
state: link
@@ -112,7 +142,7 @@
changed_when: false # For Molecule idempotence check
- name: Assert baseurl to same as atl_proxy_name
postgresql_query:
community.postgresql.postgresql_query:
login_host: "{{ atl_db_host }}"
login_user: "{{ atl_jdbc_user }}"
login_password: "{{ atl_jdbc_password }}"
@@ -131,32 +161,32 @@
ignore_errors: yes # For Molecule as it has no db test framework included
- name: Check that crowd.cfg.xml exists
stat:
ansible.builtin.stat:
path: "{{ atl_product_home_shared }}/crowd.cfg.xml"
register: crowd_cfg_stat_result
- block:
- name: Assert JDBC password to same as atl_jdbc_password
xml:
community.general.xml:
path: "{{ atl_product_home_shared }}/crowd.cfg.xml"
xpath: "/application-configuration/properties/property[@name='hibernate.connection.password']"
value: "{{ atl_jdbc_password }}"
- name: Assert JDBC url to same as atl_jdbc_url
xml:
community.general.xml:
path: "{{ atl_product_home_shared }}/crowd.cfg.xml"
xpath: "/application-configuration/properties/property[@name='hibernate.connection.url']"
value: "{{ atl_jdbc_url }}?reWriteBatchedInserts=true"
- name: Assert hibernate.c3p0.max_size to same as atl_db_poolmaxsize
xml:
community.general.xml:
path: "{{ atl_product_home_shared }}/crowd.cfg.xml"
xpath: "/application-configuration/properties/property[@name='hibernate.c3p0.max_size']"
value: "{{ atl_db_poolmaxsize }}"
when: atl_db_poolmaxsize is defined
- name: Assert hibernate.c3p0.min_size to same as atl_db_poolminsize
xml:
community.general.xml:
path: "{{ atl_product_home_shared }}/crowd.cfg.xml"
xpath: "/application-configuration/properties/property[@name='hibernate.c3p0.min_size']"
value: "{{ atl_db_poolminsize }}"
@@ -165,6 +195,6 @@
when: crowd_cfg_stat_result.stat.exists
- name: Remove crowd.xml to prevent duplicates from appearing in cluster reporting
file:
ansible.builtin.file:
path: "{{ atl_product_installation_versioned }}/apache-tomcat/conf/Catalina/localhost/crowd.xml"
state: absent


@@ -34,7 +34,8 @@
unpackWARs="true">
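<!-- useHttpOnly keeps the session cookie inaccessible to client-side scripts -->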
<Context path="{{ atl_tomcat_contextpath }}"
docBase="../../crowd-webapp"
debug="0">
debug="0"
useHttpOnly="true">
<Manager pathname="">
</Manager>
</Context>


@@ -1,7 +1,7 @@
---
- name: Create application DB user
postgresql_user:
community.postgresql.postgresql_user:
login_host: "{{ atl_db_host }}"
login_user: "{{ atl_db_root_user }}"
login_password: "{{ atl_db_root_password }}"
@@ -11,7 +11,7 @@
expires: 'infinity'
- name: Collect dbcluster db_names
postgresql_query:
community.postgresql.postgresql_query:
login_host: "{{ atl_db_host }}"
login_user: "{{ atl_db_root_user }}"
login_password: "{{ atl_db_root_password }}"
@@ -22,7 +22,7 @@
- block:
- name: Update root privs for new user
postgresql_privs:
community.postgresql.postgresql_privs:
login_host: "{{ atl_db_host }}"
login_user: "{{ atl_db_root_user }}"
login_password: "{{ atl_db_root_password }}"
@@ -33,7 +33,7 @@
# RDS does not allow changing the collation of an existing DB; collation can only be set at creation. If the DB already exists, the "create new application database" task must be skipped: idempotence can't be relied on, as we can't be certain of the existing DB's collation.
- name: Create new application database
postgresql_db:
community.postgresql.postgresql_db:
login_host: "{{ atl_db_host }}"
login_user: "{{ atl_db_root_user }}"
login_password: "{{ atl_db_root_password }}"
@@ -45,13 +45,13 @@
lc_ctype: "{{ atl_jdbc_ctype }}"
template: "{{ atl_jdbc_template }}"
register: db_created
when: "atl_jdbc_db_name not in (dbcluster_db_names.query_result | map(attribute='datname') )"
when: "atl_jdbc_db_name not in (dbcluster_db_names.query_result | map(attribute='datname'))"
tags:
- new_only
- name: Assert ownership of public schema
postgresql_query:
community.postgresql.postgresql_query:
login_host: "{{ atl_db_host }}"
login_user: "{{ atl_db_root_user }}"
login_password: "{{ atl_db_root_password }}"
@@ -59,7 +59,7 @@
query: "ALTER SCHEMA public OWNER to {{ atl_db_root_user }};"
- name: Grant privs to root user on public schema
postgresql_query:
community.postgresql.postgresql_query:
login_host: "{{ atl_db_host }}"
login_user: "{{ atl_db_root_user }}"
login_password: "{{ atl_db_root_password }}"
@@ -67,7 +67,7 @@
query: "GRANT ALL ON SCHEMA public TO {{ atl_db_root_user }};"
- name: Grant privs to application user on public schema
postgresql_query:
community.postgresql.postgresql_query:
login_host: "{{ atl_db_host }}"
login_user: "{{ atl_db_root_user }}"
login_password: "{{ atl_db_root_password }}"


@@ -6,8 +6,8 @@ FROM {{ item.registry.url }}/{{ item.image }}
FROM {{ item.image }}
{% endif %}
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python3 sudo bash ca-certificates file && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python3.11 sudo python3.11-devel python*-dnf bash && dnf clean all; \
elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \


@@ -20,12 +20,12 @@
pre_tasks:
- name: Create base dir
file:
ansible.builtin.file:
path: '/opt/atlassian/bin'
state: directory
- name: Install git
package:
ansible.builtin.package:
name: git
roles:


@@ -1,23 +1,25 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
- name: amazon_linux2
image: amazonlinux:2
- name: amazon_linux2023
image: amazonlinux:2023
groups:
- aws_node_local
platform: linux/amd64
ulimits:
- nofile:262144:262144
- name: ubuntu_lts
image: ubuntu:bionic
image: ubuntu:jammy
groups:
- aws_node_local
platform: linux/amd64
ulimits:
- nofile:262144:262144
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../../../"
options:
skip-tags: runtime_pkg
inventory:


@@ -2,19 +2,19 @@
- name: Fetch the DIY backups repository
git:
ansible.builtin.git:
repo: "{{ atl_diy_backup_repo }}"
dest: "{{ atl_diy_backup_dir }}"
version: "master"
- name: Configure DIY backup for BB on AWS
template:
ansible.builtin.template:
src: "bitbucket.diy-backup.vars.sh.j2"
dest: "{{ atl_diy_backup_dir }}/bitbucket.diy-backup.vars.sh"
mode: 0640
- name: Install backup wrapper script
template:
ansible.builtin.template:
src: "run-backup.j2"
dest: "{{ atl_installation_base }}/bin/run-backup"
mode: 0750


@@ -6,8 +6,8 @@ FROM {{ item.registry.url }}/{{ item.image }}
FROM {{ item.image }}
{% endif %}
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python3 sudo bash ca-certificates file && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python3.11 sudo python3.11-devel python*-dnf bash && dnf clean all; \
elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \


@@ -1,23 +1,25 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
- name: amazon_linux2
image: amazonlinux:2
- name: amazon_linux2023
image: amazonlinux:2023
groups:
- aws_node_local
platform: linux/amd64
ulimits:
- nofile:262144:262144
- name: ubuntu_lts
image: ubuntu:bionic
image: ubuntu:jammy
groups:
- aws_node_local
platform: linux/amd64
ulimits:
- nofile:262144:262144
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../../../"
options:
skip-tags: runtime_pkg
inventory:


@@ -6,8 +6,8 @@ FROM {{ item.registry.url }}/{{ item.image }}
FROM {{ item.image }}
{% endif %}
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python3 sudo bash ca-certificates file && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python3.11 sudo python3.11-devel python*-dnf bash && dnf clean all; \
elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \


@@ -1,23 +1,25 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
- name: amazon_linux2
image: amazonlinux:2
- name: amazon_linux2023
image: amazonlinux:2023
groups:
- aws_node_local
platform: linux/amd64
ulimits:
- nofile:262144:262144
- name: ubuntu_lts
image: ubuntu:bionic
image: ubuntu:jammy
groups:
- aws_node_local
platform: linux/amd64
ulimits:
- nofile:262144:262144
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../../../"
options:
skip-tags: runtime_pkg
inventory:


@@ -6,8 +6,8 @@ FROM {{ item.registry.url }}/{{ item.image }}
FROM {{ item.image }}
{% endif %}
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python3 sudo bash ca-certificates file && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python3.11 sudo python3.11-devel python*-dnf bash && dnf clean all; \
elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \


@@ -30,11 +30,11 @@
pre_tasks:
- name: Create shared home
file:
ansible.builtin.file:
path: '/media/atl/jira/shared/'
state: directory
- name: Create jira-config.properties to check copy
copy:
ansible.builtin.copy:
dest: '/media/atl/jira/shared/jira-config.properties'
content: "jira.projectkey.warning = testwarning"
force: false # For idempotency check


@@ -1,23 +1,25 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
- name: amazon_linux2
image: amazonlinux:2
- name: amazon_linux2023
image: amazonlinux:2023
groups:
- aws_node_local
platform: linux/amd64
ulimits:
- nofile:262144:262144
- name: ubuntu_lts
image: ubuntu:bionic
image: ubuntu:jammy
groups:
- aws_node_local
platform: linux/amd64
ulimits:
- nofile:262144:262144
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../../../"
options:
skip-tags: runtime_pkg
inventory:

View File

@@ -6,8 +6,8 @@ FROM {{ item.registry.url }}/{{ item.image }}
FROM {{ item.image }}
{% endif %}
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python3 sudo bash ca-certificates file && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python3.11 sudo python3.11-devel python*-dnf bash && dnf clean all; \
elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \


@@ -1,23 +1,25 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
- name: amazon_linux2
image: amazonlinux:2
- name: amazon_linux2023
image: amazonlinux:2023
groups:
- aws_node_local
platform: linux/amd64
ulimits:
- nofile:262144:262144
- name: ubuntu_lts
image: ubuntu:bionic
image: ubuntu:jammy
groups:
- aws_node_local
platform: linux/amd64
ulimits:
- nofile:262144:262144
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../../../"
options:
skip-tags: runtime_pkg
inventory:


@@ -2,29 +2,29 @@
- name: Create database config
template:
ansible.builtin.template:
src: dbconfig.xml.j2
dest: "{{ atl_product_home }}/dbconfig.xml"
owner: "{{ atl_product_user }}"
- name: Create cluster config
template:
ansible.builtin.template:
src: cluster.properties.j2
dest: "{{ atl_product_home }}/cluster.properties"
owner: "{{ atl_product_user }}"
- name: Create server config
template:
ansible.builtin.template:
src: server.xml.j2
dest: "{{ atl_product_installation_versioned }}/conf/server.xml"
- name: Check for a jira-config.properties in the shared home
stat:
ansible.builtin.stat:
path: "{{ atl_product_home_shared }}/jira-config.properties"
register: jira_config_properties
- name: Copy jira-config.properties if exists
copy:
ansible.builtin.copy:
remote_src: true
src: "{{ atl_product_home_shared }}/jira-config.properties"
dest: "{{ atl_product_home }}/jira-config.properties"
@@ -35,7 +35,7 @@
- name: Override JVM memory settings.
# Ugly but necessary as the product installs this file so we need to make the change here.
lineinfile:
ansible.builtin.lineinfile:
path: "{{ atl_product_installation_versioned }}/bin/setenv.sh"
backrefs: true
regexp: "^{{ item }}="
@@ -45,25 +45,25 @@
- 'JVM_MAXIMUM_MEMORY'
- name: Set Jira home directory
lineinfile:
ansible.builtin.lineinfile:
path: "{{ atl_product_installation_versioned }}/bin/setenv.sh"
regexp: "JIRA_HOME="
line: 'JIRA_HOME="{{ atl_product_home }}"'
- name: Set the Tomcat environment
lineinfile:
ansible.builtin.lineinfile:
path: "{{ atl_product_installation_versioned }}/bin/setenv.sh"
insertafter: "EOF"
line: 'export CATALINA_OPTS="${CATALINA_OPTS} {{ atl_catalina_opts }} {{ atl_catalina_opts_extra }}"'
- name: Set support recommended JVM args
lineinfile:
ansible.builtin.lineinfile:
path: "{{ atl_product_installation_versioned }}/bin/setenv.sh"
regexp: "JVM_SUPPORT_RECOMMENDED_ARGS="
line: 'JVM_SUPPORT_RECOMMENDED_ARGS="{{ atl_jvm_opts }}"'
- name: Create application directories
file:
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: 0750
@@ -75,22 +75,43 @@
- "{{ atl_product_shared_plugins }}"
changed_when: false # For Molecule idempotence check
- name: Limit permissions on the installation directory
file:
path: "{{ atl_product_installation_versioned }}"
- name: Limit permissions on the installer temp directory, recursively
ansible.builtin.file:
path: "{{ atl_installer_temp }}"
owner: "root"
group: "root"
mode: "u=rwX,g=rX,o=rX"
recurse: true
with_items:
- "{{ atl_installer_temp }}"
- "{{ atl_product_installation_versioned }}"
- "{{ atl_product_version_cache_dir }}"
changed_when: false # For Molecule idempotence check
- name: Limit permissions on the installation directory, non-recursively
ansible.builtin.file:
path: "{{ atl_product_installation_versioned }}"
owner: "root"
group: "root"
mode: "u=rwX,g=rX,o=rX"
changed_when: false # For Molecule idempotence check
- name: Find top-level files/directories in installation directory, excluding working directories
ansible.builtin.find:
paths: "{{ atl_product_installation_versioned }}"
depth: 1
file_type: any
excludes: logs,temp,work
register: atl_product_installation_versioned_file_list
- name: Limit permissions on files and directories in the installation directory, recursively, excluding working directories
ansible.builtin.file:
path: "{{ item.path }}"
owner: "root"
group: "root"
mode: "u=rwX,g=rX,o=rX"
recurse: "{{ item.isdir }}"
loop: "{{ atl_product_installation_versioned_file_list.files }}"
changed_when: false # For Molecule idempotence check
- name: Grant access to the product working directories
file:
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: "u=rwX,g=rX,o-rwx"
@@ -104,7 +125,7 @@
changed_when: false # For Molecule idempotence check
- name: Create conf/Catalina directory owned by product so catalina.out logging works
file:
ansible.builtin.file:
path: "{{ atl_product_installation_versioned }}/conf/Catalina"
state: directory
mode: "u=rwX,g=rX,o-rwx"
@@ -113,7 +134,7 @@
changed_when: false # For Molecule idempotence check
- name: Assert baseurl to same as atl_proxy_name
postgresql_query:
community.postgresql.postgresql_query:
login_host: "{{ atl_db_host }}"
login_user: "{{ atl_jdbc_user }}"
login_password: "{{ atl_jdbc_password }}"


@@ -8,7 +8,12 @@
<jdbc-datasource>
<url>{{ atl_jdbc_url }}</url>
<username>{{ atl_jdbc_user }}</username>
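{# Jira 9.11+ can likewise resolve the DB password from AWS Secrets Manager when a region and secret ID are provided #}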
{% if atl_product_version.split(".")[:2] | join(".") is version('9.11', '>=') and atl_secretsmanager_aws_region is defined and atl_secretsmanager_aws_region != "" and atl_secretsmanager_aws_secret_id is defined and atl_secretsmanager_aws_secret_id != "" -%}
<atlassian-password-cipher-provider>com.atlassian.secrets.store.aws.AwsSecretsManagerStore</atlassian-password-cipher-provider>
<password>{"region": "{{ atl_secretsmanager_aws_region }}", "secretId": "{{ atl_secretsmanager_aws_secret_id }}"}</password>
{% else -%}
<password>{{ atl_jdbc_password | replace("&", "&amp;") }}</password>
{% endif -%}
<driver-class>{{ atl_db_driver }}</driver-class>
{% if 'postgres' in atl_db_type %}
<connection-properties>tcpKeepAlive={{ atl_db_keepalive }};socketTimeout={{ atl_db_sockettimeout }}</connection-properties>


@@ -6,8 +6,8 @@ FROM {{ item.registry.url }}/{{ item.image }}
FROM {{ item.image }}
{% endif %}
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python3 sudo bash ca-certificates file && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python3.11 sudo python3.11-devel python*-dnf bash && dnf clean all; \
elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \


@@ -1,19 +1,21 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
- name: amazon_linux2
image: amazonlinux:2
- name: amazon_linux2023
image: amazonlinux:2023
groups:
- aws_node_local
platform: linux/amd64
- name: ubuntu_lts
image: ubuntu:bionic
image: ubuntu:jammy
groups:
- aws_node_local
platform: linux/amd64
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../../../"
inventory:
links:
group_vars: ../../../../group_vars/


@@ -0,0 +1,5 @@
---
- name: Amazon Linux 2 no longer supported
fail:
msg: "Amazon Linux 2 is no longer supported; see README.md for supported operating systems/environments."


@@ -0,0 +1,16 @@
---
- name: Install Amazon-Linux-2023-specific support packages
ansible.builtin.dnf:
name:
- dejavu-sans-fonts
- file
- git
- libxml2
- shadow-utils
- name: Uninstall curl-minimal (conflicts with curl)
ansible.builtin.dnf:
name: curl-minimal
state: absent
autoremove: no


@@ -1,20 +0,0 @@
---
- name: Install Amazon-Linux-specific support packages
yum:
name:
- dejavu-sans-fonts
- file
- git-{{ git_version }}
- libxml2
- shadow-utils
- name: Limit the SSH ciphers
lineinfile:
path: "/etc/ssh/sshd_config"
# Drop insecure ciphers, currently 3des-cbc only. You can get the
# full list with `sshd -T | grep -i ciphers`
regexp: '^[Cc]iphers'
line: "Ciphers chacha20-poly1305@openssh.com,aes128-ctr,aes192-ctr,aes256-ctr,aes128-gcm@openssh.com,aes256-gcm@openssh.com,aes128-cbc,aes192-cbc,aes256-cbc,blowfish-cbc,cast128-cbc"
insertbefore: "BOF"
ignore_errors: yes # No sshd == no problem


@@ -2,33 +2,37 @@
# Note: Try to limit these to packages that are distro-specific, and
# place commonly-named ones below.
- name: Install distro-specific prerequisites
include_tasks: "{{ ansible_distribution|lower }}.yml"
- name: Install Amazon-Linux-specific prerequisites
ansible.builtin.include_tasks: "{{ ansible_distribution | lower }}-{{ ansible_distribution_version }}.yml"
when: ansible_distribution | lower == 'amazon'
- name: Install Ubuntu/Debian-specific prerequisites
ansible.builtin.include_tasks: "{{ ansible_distribution | lower }}.yml"
when: ansible_distribution | lower != 'amazon'
- name: Install common support packages
package:
ansible.builtin.package:
name:
- jq
- tar
- curl
- unzip
- fontconfig
- python-psycopg2
- name: Create product group
group:
ansible.builtin.group:
name: "{{ atl_product_user }}"
gid: "{{ atl_product_user_uid }}"
- name: Create product user
user:
ansible.builtin.user:
name: "{{ atl_product_user }}"
uid: "{{ atl_product_user_uid }}"
group: "{{ atl_product_user }}"
comment: "Product runtime user"
- name: Stop systemd-cleanup deleting the jvm socket file
copy:
ansible.builtin.copy:
src: java.conf
dest: "/usr/lib/tmpfiles.d/java.conf"
owner: root
@@ -37,7 +41,7 @@
register: systemd_config_changed
- name: Force systemd to reload daemon configuration
systemd:
ansible.builtin.systemd_service:
daemon_reload: yes
when:
- systemd_config_changed is defined


@@ -1,9 +1,8 @@
---
- name: Install common Ubuntu support packages
apt:
ansible.builtin.apt:
name:
- python3-psycopg2
- libxml2-utils
- git
- fontconfig


@@ -1,7 +1,7 @@
---
- name: Create mountpoint
file:
ansible.builtin.file:
state: directory
path: "{{ atl_shared_mountpoint }}"
mode: 0755
@@ -9,7 +9,7 @@
group: "{{ atl_product_user }}"
- name: Enable mountpoint in fstab
mount:
ansible.posix.mount:
src: "{{ atl_fileserver_host }}:{{ atl_nfs_target }}"
path: "{{ atl_nfs_mountpoint }}"
fstype: nfs


@@ -1,6 +1,6 @@
---
- name: Restart NFS
service:
ansible.builtin.service:
name: "nfs.service"
state: restarted


@@ -0,0 +1,5 @@
---
- name: Amazon Linux 2 no longer supported
fail:
msg: "Amazon Linux 2 is no longer supported; see README.md for supported operating systems/environments."


@@ -0,0 +1,18 @@
---
- name: Install Amazon-Linux-2023-specific NFS packages
ansible.builtin.dnf:
name:
- nfs-utils
- name: Create nfs.service symlink to nfs-server.service
ansible.builtin.file:
src: /usr/lib/systemd/system/nfs-server.service
dest: /usr/lib/systemd/system/nfs.service
owner: root
group: root
state: link
- name: Reload systemd services
ansible.builtin.systemd_service:
daemon_reload: yes


@@ -1,6 +0,0 @@
---
- name: Install Amazon-Linux-specific NFS packages
yum:
name:
- nfs-utils


@@ -1,16 +1,20 @@
---
- name: Install distro-specific NFS packages
include_tasks: "{{ ansible_distribution|lower }}.yml"
- name: Install Amazon-Linux-specific NFS packages
ansible.builtin.include_tasks: "{{ ansible_distribution | lower }}-{{ ansible_distribution_version }}.yml"
when: ansible_distribution | lower == 'amazon'
- name: Install Ubuntu/Debian-specific NFS packages
ansible.builtin.include_tasks: "{{ ansible_distribution | lower }}.yml"
when: ansible_distribution | lower != 'amazon'
- name: Create mountpoint
file:
ansible.builtin.file:
path: "{{ atl_shared_mountpoint }}"
state: directory
- name: Setup the disk partition
parted:
community.general.parted:
device: "{{ atl_nfs_server_device }}"
label: gpt
name: "{{ atl_nfs_fs_label }}"
@@ -22,7 +26,7 @@
- name: Create the filesystem
filesystem:
community.general.filesystem:
dev: "{{ atl_nfs_server_device }}"
fstype: "{{ atl_nfs_fs_type }}"
opts: "-L {{ atl_nfs_fs_label }}"
@@ -30,7 +34,7 @@
- new_only
- name: Setup fstab and mount the filesystem
mount:
ansible.posix.mount:
path: "{{ atl_shared_mountpoint }}"
src: "LABEL={{ atl_nfs_fs_label }}"
fstype: "{{ atl_nfs_fs_type }}"
@@ -40,7 +44,7 @@
- name: Create the shared home
file:
ansible.builtin.file:
path: "{{ atl_shared_mountpoint }}/{{ atl_product_user }}/shared"
state: directory
owner: "{{ atl_product_user }}"
@@ -51,14 +55,14 @@
- name: Create the NFS export file
template:
ansible.builtin.template:
src: "media-atl.exports.j2"
dest: "/etc/exports.d/20-media-atl.exports"
notify:
- Restart NFS
- name: Enable NFS
service:
ansible.builtin.service:
name: nfs.service
enabled: true
state: started


@@ -1,7 +1,7 @@
---
- name: Install Ubuntu-specific NFS packages
apt:
ansible.builtin.apt:
name:
- nfs-kernel-server
- libnfs-utils


@@ -6,8 +6,8 @@ FROM {{ item.registry.url }}/{{ item.image }}
FROM {{ item.image }}
{% endif %}
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python3 sudo bash ca-certificates file && apt-get clean; \
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python3.11 sudo python3.11-devel python*-dnf bash && dnf clean all; \
elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \


@@ -1,14 +1,14 @@
---
dependency:
name: galaxy
driver:
name: docker
platforms:
- name: amazon_linux2
image: amazonlinux:2
- name: amazon_linux2023
image: amazonlinux:2023
- name: ubuntu_lts
image: ubuntu:bionic
image: ubuntu:jammy
provisioner:
name: ansible
env:
ANSIBLE_COLLECTIONS_PATH: "../../../../"
verifier:
name: testinfra
