From 58632c27f986a5481707a87ff32ace2f66579c57 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Thu, 3 Oct 2019 11:24:31 +1000 Subject: [PATCH 01/93] DCD-686: Initial manifest download logic. --- group_vars/aws_node_local.yml | 2 ++ .../molecule/default/Dockerfile.j2 | 14 ++++++++++ .../molecule/default/molecule.yml | 20 ++++++++++++++ .../molecule/default/playbook.yml | 12 +++++++++ .../molecule/default/tests/test_default.py | 19 ++++++++++++++ roles/restore_metadata/tasks/main.yml | 26 +++++++++++++++++++ 6 files changed, 93 insertions(+) create mode 100644 roles/restore_metadata/molecule/default/Dockerfile.j2 create mode 100644 roles/restore_metadata/molecule/default/molecule.yml create mode 100644 roles/restore_metadata/molecule/default/playbook.yml create mode 100644 roles/restore_metadata/molecule/default/tests/test_default.py create mode 100644 roles/restore_metadata/tasks/main.yml diff --git a/group_vars/aws_node_local.yml b/group_vars/aws_node_local.yml index cd6ea3b..6db2a6d 100644 --- a/group_vars/aws_node_local.yml +++ b/group_vars/aws_node_local.yml @@ -126,3 +126,5 @@ atl_rds_instance_class: "{{ lookup('env', 'ATL_RDS_INSTANCE_CLASS') }}" atl_rds_multi_az: "{{ lookup('env', 'ATL_RDS_MULTI_AZ') }}" atl_rds_subnet_group_name: "{{ lookup('env', 'ATL_RDS_SUBNET_GROUP_NAME') }}" atl_rds_security_group: "{{ lookup('env', 'ATL_RDS_SECURITY_GROUP') }}" + +atl_backup_manifest_url: "{{ lookup('env', 'ATL_BACKUP_MANIFEST_URL) }}" diff --git a/roles/restore_metadata/molecule/default/Dockerfile.j2 b/roles/restore_metadata/molecule/default/Dockerfile.j2 new file mode 100644 index 0000000..e6aa95d --- /dev/null +++ b/roles/restore_metadata/molecule/default/Dockerfile.j2 @@ -0,0 +1,14 @@ +# Molecule managed + +{% if item.registry is defined %} +FROM {{ item.registry.url }}/{{ item.image }} +{% else %} +FROM {{ item.image }} +{% endif %} + +RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \ + 
elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \ + elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \ + elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \ + elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \ + elif [ $(command -v xbps-install) ]; then xbps-install -Syu && xbps-install -y python sudo bash ca-certificates && xbps-remove -O; fi diff --git a/roles/restore_metadata/molecule/default/molecule.yml b/roles/restore_metadata/molecule/default/molecule.yml new file mode 100644 index 0000000..c6a6d26 --- /dev/null +++ b/roles/restore_metadata/molecule/default/molecule.yml @@ -0,0 +1,20 @@ +--- +dependency: + name: galaxy +driver: + name: docker +lint: + name: yamllint +platforms: + - name: amazon_linux2 + image: amazonlinux:2 + - name: ubuntu_lts + image: ubuntu:bionic +provisioner: + name: ansible + lint: + name: ansible-lint +verifier: + name: testinfra + lint: + name: flake8 diff --git a/roles/restore_metadata/molecule/default/playbook.yml b/roles/restore_metadata/molecule/default/playbook.yml new file mode 100644 index 0000000..6e24bad --- /dev/null +++ b/roles/restore_metadata/molecule/default/playbook.yml @@ -0,0 +1,12 @@ +--- +- name: Converge + hosts: all + vars: + atl_product_user: "testuser" + atl_product_home: "/opt/atlassian/product" + atl_product_installation_base: "/opt/atlassian/product/install" + atl_installer_temp: "/opt/atlassian/temp" + atl_product_home_shared: "/media/atl/jira/shared" + atl_product_shared_plugins: "/media/atl/jira/shared/plugins/" + roles: + - role: restore_metadata diff --git a/roles/restore_metadata/molecule/default/tests/test_default.py b/roles/restore_metadata/molecule/default/tests/test_default.py 
new file mode 100644 index 0000000..b29ef83 --- /dev/null +++ b/roles/restore_metadata/molecule/default/tests/test_default.py @@ -0,0 +1,19 @@ +import os +import pytest + +import testinfra.utils.ansible_runner + +testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( + os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all') + + +def test_user_created(host): + user = host.user('testuser') + assert user.exists + + +@pytest.mark.parametrize('exe', [ + '/usr/bin/git' +]) +def test_package_exes(host, exe): + assert host.file(exe).exists diff --git a/roles/restore_metadata/tasks/main.yml b/roles/restore_metadata/tasks/main.yml new file mode 100644 index 0000000..a6bd85f --- /dev/null +++ b/roles/restore_metadata/tasks/main.yml @@ -0,0 +1,26 @@ +--- + +- block: + + - name: Parse the manifest URL + set_fact: + atl_backup_manifest_scheme: "{{ atl_backup_manifest_url | urlsplit('scheme') }}" + atl_backup_manifest_bucket: "{{ atl_backup_manifest_url | urlsplit('hostname') }}" + atl_backup_manifest_path: "{{ atl_backup_manifest_url | urlsplit('path') }}" + atl_backup_manifest_filename: "{{ atl_backup_manifest_path | basename}}" + atl_backup_manifest_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest_filename }}" + + - name: Fetch the manifest from S3 + aws_s3: + bucket: "{{ atl_backup_manifest | urlsplit('hostname' }}" + object: "{{ atl_backup_manifest | urlsplit('path' }}" + dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest | urlsplit('path' | }}" + when: atl_backup_manifest_scheme == 's3' + + - name: Fetch the manifest from remote host + get_url: + url: "{{ atl_backup_manifest_url }}" + dest: "{{ atl_backup_manifest_dest }}" + when: atl_backup_manifest_scheme != 's3' + + when: atl_backup_manifest_url is defined and atl_backup_manifest_url != '' From 24d8a8f16cf5b6a0f0c7100bccabeb192848856e Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Thu, 3 Oct 2019 11:34:25 +1000 Subject: [PATCH 02/93] DCD-686: Fix missing quote. 
--- group_vars/aws_node_local.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/group_vars/aws_node_local.yml b/group_vars/aws_node_local.yml index 6db2a6d..69285f9 100644 --- a/group_vars/aws_node_local.yml +++ b/group_vars/aws_node_local.yml @@ -127,4 +127,4 @@ atl_rds_multi_az: "{{ lookup('env', 'ATL_RDS_MULTI_AZ') }}" atl_rds_subnet_group_name: "{{ lookup('env', 'ATL_RDS_SUBNET_GROUP_NAME') }}" atl_rds_security_group: "{{ lookup('env', 'ATL_RDS_SECURITY_GROUP') }}" -atl_backup_manifest_url: "{{ lookup('env', 'ATL_BACKUP_MANIFEST_URL) }}" +atl_backup_manifest_url: "{{ lookup('env', 'ATL_BACKUP_MANIFEST_URL') }}" From cfca695c31107175e4954de5d365e8114e90f614 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Thu, 3 Oct 2019 11:36:49 +1000 Subject: [PATCH 03/93] DCD-686: set_fact variables don't stack. --- roles/restore_metadata/tasks/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/restore_metadata/tasks/main.yml b/roles/restore_metadata/tasks/main.yml index a6bd85f..848e1de 100644 --- a/roles/restore_metadata/tasks/main.yml +++ b/roles/restore_metadata/tasks/main.yml @@ -8,7 +8,7 @@ atl_backup_manifest_bucket: "{{ atl_backup_manifest_url | urlsplit('hostname') }}" atl_backup_manifest_path: "{{ atl_backup_manifest_url | urlsplit('path') }}" atl_backup_manifest_filename: "{{ atl_backup_manifest_path | basename}}" - atl_backup_manifest_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest_filename }}" + atl_backup_manifest_dest: "{{ atl_installer_temp }}/{{ {{ atl_backup_manifest_path | basename}}" - name: Fetch the manifest from S3 aws_s3: From 38b1441d4a95faa193f27a9be7ceb818401cc4a2 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Thu, 3 Oct 2019 11:37:51 +1000 Subject: [PATCH 04/93] DCD-686: Typo --- roles/restore_metadata/tasks/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/restore_metadata/tasks/main.yml b/roles/restore_metadata/tasks/main.yml index 848e1de..7f7c30f 100644 
--- a/roles/restore_metadata/tasks/main.yml +++ b/roles/restore_metadata/tasks/main.yml @@ -8,7 +8,7 @@ atl_backup_manifest_bucket: "{{ atl_backup_manifest_url | urlsplit('hostname') }}" atl_backup_manifest_path: "{{ atl_backup_manifest_url | urlsplit('path') }}" atl_backup_manifest_filename: "{{ atl_backup_manifest_path | basename}}" - atl_backup_manifest_dest: "{{ atl_installer_temp }}/{{ {{ atl_backup_manifest_path | basename}}" + atl_backup_manifest_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest_path | basename}}" - name: Fetch the manifest from S3 aws_s3: From 79fd9e5db940c224dd2dc5924225701f82dc4c07 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Thu, 3 Oct 2019 11:47:10 +1000 Subject: [PATCH 05/93] DCD-686: More tweaks to manifest handling. --- roles/restore_metadata/tasks/main.yml | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/roles/restore_metadata/tasks/main.yml b/roles/restore_metadata/tasks/main.yml index 7f7c30f..c532608 100644 --- a/roles/restore_metadata/tasks/main.yml +++ b/roles/restore_metadata/tasks/main.yml @@ -4,17 +4,19 @@ - name: Parse the manifest URL set_fact: - atl_backup_manifest_scheme: "{{ atl_backup_manifest_url | urlsplit('scheme') }}" - atl_backup_manifest_bucket: "{{ atl_backup_manifest_url | urlsplit('hostname') }}" - atl_backup_manifest_path: "{{ atl_backup_manifest_url | urlsplit('path') }}" - atl_backup_manifest_filename: "{{ atl_backup_manifest_path | basename}}" - atl_backup_manifest_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest_path | basename}}" + atl_backup_manifest_url: "{{ atl_backup_manifest_url | urlsplit }}" + + - name: Extract manifest file information + set_fact: + atl_backup_manifest_bucket: "{{ atl_backup_manifest_url.hostname }}" + atl_backup_manifest_path: "{{ atl_backup_manifest_url.path }}" + atl_backup_manifest_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest_url.path | basename }}" - name: Fetch the manifest from S3 aws_s3: - bucket: "{{ 
atl_backup_manifest | urlsplit('hostname' }}" - object: "{{ atl_backup_manifest | urlsplit('path' }}" - dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest | urlsplit('path' | }}" + bucket: "{{ atl_backup_manifest_bucket }}" + object: "{{ atl_backup_manifest_path }}" + dest: "{{ atl_backup_manifest_dest }}" when: atl_backup_manifest_scheme == 's3' - name: Fetch the manifest from remote host From 347bdab24db209922397797e5ff3f029ea190023 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Thu, 3 Oct 2019 11:48:47 +1000 Subject: [PATCH 06/93] DCD-686: Correct scheme lookup. --- roles/restore_metadata/tasks/main.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/roles/restore_metadata/tasks/main.yml b/roles/restore_metadata/tasks/main.yml index c532608..74de3d2 100644 --- a/roles/restore_metadata/tasks/main.yml +++ b/roles/restore_metadata/tasks/main.yml @@ -17,12 +17,12 @@ bucket: "{{ atl_backup_manifest_bucket }}" object: "{{ atl_backup_manifest_path }}" dest: "{{ atl_backup_manifest_dest }}" - when: atl_backup_manifest_scheme == 's3' + when: atl_backup_manifest_url.scheme == 's3' - name: Fetch the manifest from remote host get_url: url: "{{ atl_backup_manifest_url }}" dest: "{{ atl_backup_manifest_dest }}" - when: atl_backup_manifest_scheme != 's3' + when: atl_backup_manifest_url.scheme != 's3' when: atl_backup_manifest_url is defined and atl_backup_manifest_url != '' From 8c3d7ce9f4d17dd00de86239bd65572a957744d6 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Thu, 3 Oct 2019 11:50:14 +1000 Subject: [PATCH 07/93] DCD-686: Add file permissions. 
--- roles/restore_metadata/tasks/main.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/roles/restore_metadata/tasks/main.yml b/roles/restore_metadata/tasks/main.yml index 74de3d2..5c13946 100644 --- a/roles/restore_metadata/tasks/main.yml +++ b/roles/restore_metadata/tasks/main.yml @@ -23,6 +23,7 @@ get_url: url: "{{ atl_backup_manifest_url }}" dest: "{{ atl_backup_manifest_dest }}" + mode: 0600 when: atl_backup_manifest_url.scheme != 's3' when: atl_backup_manifest_url is defined and atl_backup_manifest_url != '' From a3b3460f97b26f33dad746a3d15fa09481ae1410 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Thu, 3 Oct 2019 11:51:14 +1000 Subject: [PATCH 08/93] DCD-686: Add file permissions to s3 fetch. --- roles/restore_metadata/tasks/main.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/roles/restore_metadata/tasks/main.yml b/roles/restore_metadata/tasks/main.yml index 5c13946..8afd08b 100644 --- a/roles/restore_metadata/tasks/main.yml +++ b/roles/restore_metadata/tasks/main.yml @@ -17,6 +17,7 @@ bucket: "{{ atl_backup_manifest_bucket }}" object: "{{ atl_backup_manifest_path }}" dest: "{{ atl_backup_manifest_dest }}" + mode: 0600 when: atl_backup_manifest_url.scheme == 's3' - name: Fetch the manifest from remote host From 89038fb7c055b03b3a980b569bc6691a19c56667 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Thu, 3 Oct 2019 11:52:34 +1000 Subject: [PATCH 09/93] DCD-686: Fix S3 download. 
--- roles/restore_metadata/tasks/main.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/roles/restore_metadata/tasks/main.yml b/roles/restore_metadata/tasks/main.yml index 8afd08b..f23f1f0 100644 --- a/roles/restore_metadata/tasks/main.yml +++ b/roles/restore_metadata/tasks/main.yml @@ -14,17 +14,16 @@ - name: Fetch the manifest from S3 aws_s3: + mode: get bucket: "{{ atl_backup_manifest_bucket }}" object: "{{ atl_backup_manifest_path }}" dest: "{{ atl_backup_manifest_dest }}" - mode: 0600 when: atl_backup_manifest_url.scheme == 's3' - name: Fetch the manifest from remote host get_url: url: "{{ atl_backup_manifest_url }}" dest: "{{ atl_backup_manifest_dest }}" - mode: 0600 when: atl_backup_manifest_url.scheme != 's3' when: atl_backup_manifest_url is defined and atl_backup_manifest_url != '' From 14e2fdf25b673fc6548c4e07b198a44bf32edf9f Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Thu, 3 Oct 2019 13:24:07 +1000 Subject: [PATCH 10/93] DCD-686: Ensure temp directory is present before downloads. --- roles/restore_metadata/tasks/main.yml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/roles/restore_metadata/tasks/main.yml b/roles/restore_metadata/tasks/main.yml index f23f1f0..5264854 100644 --- a/roles/restore_metadata/tasks/main.yml +++ b/roles/restore_metadata/tasks/main.yml @@ -2,6 +2,15 @@ - block: + - name: Ensure temp directory is present + file: + path: "{{ atl_installer_temp }}" + state: directory + mode: 0750 + owner: "{{ atl_product_user }}" + group: "{{ atl_product_user }}" + changed_when: false # For Molecule idempotence check + - name: Parse the manifest URL set_fact: atl_backup_manifest_url: "{{ atl_backup_manifest_url | urlsplit }}" From 46adc9af3f9212f2914004adc72ee16b8eb30b22 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Thu, 3 Oct 2019 13:29:21 +1000 Subject: [PATCH 11/93] DCD-686: Move to Python 3.7 and update packages to support AWS operations. 
--- Pipfile | 4 +- Pipfile.lock | 501 +++++++++++++++++++------------------------- bin/install-ansible | 9 +- 3 files changed, 225 insertions(+), 289 deletions(-) diff --git a/Pipfile b/Pipfile index 0baa144..4d8129d 100644 --- a/Pipfile +++ b/Pipfile @@ -5,6 +5,8 @@ name = "pypi" [packages] ansible = "==2.7.11" +boto3 = "==1.9.241" +botocore = "==1.12.241" [dev-packages] molecule = "==2.20.1" @@ -14,4 +16,4 @@ taskcat = "*" Jinja2 = "*" [requires] -python_version = "2.7" +python_version = "3.7" diff --git a/Pipfile.lock b/Pipfile.lock index 12753dd..976eed7 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,11 +1,11 @@ { "_meta": { "hash": { - "sha256": "d19b07115cf0a0e5ea9ce4283e43ee9c0efa52683080b730b8bf943ed87861e8" + "sha256": "fe304fca8752522c4a630677978735c57d39f161e7d0046ea128a21c7c28e373" }, "pipfile-spec": 6, "requires": { - "python_version": "2.7" + "python_version": "3.7" }, "sources": [ { @@ -25,34 +25,47 @@ }, "asn1crypto": { "hashes": [ - "sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87", - "sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49" + "sha256:d02bf8ea1b964a5ff04ac7891fe3a39150045d1e5e4fe99273ba677d11b92a04", + "sha256:f822954b90c4c44f002e2cd46d636ab630f1fe4df22c816a82b66505c404eb2a" ], - "version": "==0.24.0" + "version": "==1.0.0" }, "bcrypt": { "hashes": [ - "sha256:0ba875eb67b011add6d8c5b76afbd92166e98b1f1efab9433d5dc0fafc76e203", - "sha256:21ed446054c93e209434148ef0b362432bb82bbdaf7beef70a32c221f3e33d1c", - "sha256:28a0459381a8021f57230954b9e9a65bb5e3d569d2c253c5cac6cb181d71cf23", - "sha256:2aed3091eb6f51c26b7c2fad08d6620d1c35839e7a362f706015b41bd991125e", - "sha256:2fa5d1e438958ea90eaedbf8082c2ceb1a684b4f6c75a3800c6ec1e18ebef96f", - "sha256:3a73f45484e9874252002793518da060fb11eaa76c30713faa12115db17d1430", - "sha256:3e489787638a36bb466cd66780e15715494b6d6905ffdbaede94440d6d8e7dba", - "sha256:44636759d222baa62806bbceb20e96f75a015a6381690d1bc2eda91c01ec02ea", - 
"sha256:678c21b2fecaa72a1eded0cf12351b153615520637efcadc09ecf81b871f1596", - "sha256:75460c2c3786977ea9768d6c9d8957ba31b5fbeb0aae67a5c0e96aab4155f18c", - "sha256:8ac06fb3e6aacb0a95b56eba735c0b64df49651c6ceb1ad1cf01ba75070d567f", - "sha256:8fdced50a8b646fff8fa0e4b1c5fd940ecc844b43d1da5a980cb07f2d1b1132f", - "sha256:9b2c5b640a2da533b0ab5f148d87fb9989bf9bcb2e61eea6a729102a6d36aef9", - "sha256:a9083e7fa9adb1a4de5ac15f9097eb15b04e2c8f97618f1b881af40abce382e1", - "sha256:b7e3948b8b1a81c5a99d41da5fb2dc03ddb93b5f96fcd3fd27e643f91efa33e1", - "sha256:b998b8ca979d906085f6a5d84f7b5459e5e94a13fc27c28a3514437013b6c2f6", - "sha256:dd08c50bc6f7be69cd7ba0769acca28c846ec46b7a8ddc2acf4b9ac6f8a7457e", - "sha256:de5badee458544ab8125e63e39afeedfcf3aef6a6e2282ac159c95ae7472d773", - "sha256:ede2a87333d24f55a4a7338a6ccdccf3eaa9bed081d1737e0db4dbd1a4f7e6b6" + "sha256:0258f143f3de96b7c14f762c770f5fc56ccd72f8a1857a451c1cd9a655d9ac89", + "sha256:0b0069c752ec14172c5f78208f1863d7ad6755a6fae6fe76ec2c80d13be41e42", + "sha256:19a4b72a6ae5bb467fea018b825f0a7d917789bcfe893e53f15c92805d187294", + "sha256:5432dd7b34107ae8ed6c10a71b4397f1c853bd39a4d6ffa7e35f40584cffd161", + "sha256:69361315039878c0680be456640f8705d76cb4a3a3fe1e057e0f261b74be4b31", + "sha256:6fe49a60b25b584e2f4ef175b29d3a83ba63b3a4df1b4c0605b826668d1b6be5", + "sha256:74a015102e877d0ccd02cdeaa18b32aa7273746914a6c5d0456dd442cb65b99c", + "sha256:763669a367869786bb4c8fcf731f4175775a5b43f070f50f46f0b59da45375d0", + "sha256:8b10acde4e1919d6015e1df86d4c217d3b5b01bb7744c36113ea43d529e1c3de", + "sha256:9fe92406c857409b70a38729dbdf6578caf9228de0aef5bc44f859ffe971a39e", + "sha256:a190f2a5dbbdbff4b74e3103cef44344bc30e61255beb27310e2aec407766052", + "sha256:a595c12c618119255c90deb4b046e1ca3bcfad64667c43d1166f2b04bc72db09", + "sha256:c9457fa5c121e94a58d6505cadca8bed1c64444b83b3204928a866ca2e599105", + "sha256:cb93f6b2ab0f6853550b74e051d297c27a638719753eb9ff66d1e4072be67133", + "sha256:d7bdc26475679dd073ba0ed2766445bb5b20ca4793ca0db32b399dccc6bc84b7", 
+ "sha256:ff032765bb8716d9387fd5376d987a937254b0619eff0972779515b5c98820bc" ], - "version": "==3.1.6" + "version": "==3.1.7" + }, + "boto3": { + "hashes": [ + "sha256:60e711f1113be926bcec1cfe62fa336438d021ce834f4a5228beead3b4bc5142", + "sha256:8c9b9b2422c1baa84c0f331ee86ac4d265e1e7d321ce7ba58dbb863585c2191f" + ], + "index": "pypi", + "version": "==1.9.241" + }, + "botocore": { + "hashes": [ + "sha256:897415ec68b2cbb65a7d32965c456d332bb2eb936e533c9ad6064cd15e67c0c1", + "sha256:e35c2e6b8946be9063d7988b19dea2b6136b80c0e3469b6a076c574d5abca6b3" + ], + "index": "pypi", + "version": "==1.12.241" }, "cffi": { "hashes": [ @@ -108,23 +121,13 @@ ], "version": "==2.7" }, - "enum34": { + "docutils": { "hashes": [ - "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850", - "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a", - "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79", - "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1" + "sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0", + "sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827", + "sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99" ], - "markers": "python_version < '3'", - "version": "==1.1.6" - }, - "ipaddress": { - "hashes": [ - "sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794", - "sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c" - ], - "markers": "python_version < '3'", - "version": "==1.0.22" + "version": "==0.15.2" }, "jinja2": { "hashes": [ @@ -133,6 +136,13 @@ ], "version": "==2.10.1" }, + "jmespath": { + "hashes": [ + "sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6", + "sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c" + ], + "version": "==0.9.4" + }, "markupsafe": { "hashes": [ "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", @@ -168,10 
+178,10 @@ }, "paramiko": { "hashes": [ - "sha256:69c219df239775800a2589ee60159aa7cfd87175809b6557da7fb9dcb44ca430", - "sha256:9f081281064b5180dc0ef60e256224a280ff16f603a99f3dd4ba6334ebb65f7e" + "sha256:99f0179bdc176281d21961a003ffdb2ec369daac1a1007241f53374e376576cf", + "sha256:f4b2edfa0d226b70bd4ca31ea7e389325990283da23465d572ed1f70a7583041" ], - "version": "==2.5.0" + "version": "==2.6.0" }, "pycparser": { "hashes": [ @@ -203,21 +213,38 @@ ], "version": "==1.3.0" }, + "python-dateutil": { + "hashes": [ + "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb", + "sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e" + ], + "markers": "python_version >= '2.7'", + "version": "==2.8.0" + }, "pyyaml": { "hashes": [ - "sha256:57acc1d8533cbe51f6662a55434f0dbecfa2b9eaf115bede8f6fd00115a0c0d3", - "sha256:588c94b3d16b76cfed8e0be54932e5729cc185caffaa5a451e7ad2f7ed8b4043", - "sha256:68c8dd247f29f9a0d09375c9c6b8fdc64b60810ebf07ba4cdd64ceee3a58c7b7", - "sha256:70d9818f1c9cd5c48bb87804f2efc8692f1023dac7f1a1a5c61d454043c1d265", - "sha256:86a93cccd50f8c125286e637328ff4eef108400dd7089b46a7be3445eecfa391", - "sha256:a0f329125a926876f647c9fa0ef32801587a12328b4a3c741270464e3e4fa778", - "sha256:a3c252ab0fa1bb0d5a3f6449a4826732f3eb6c0270925548cac342bc9b22c225", - "sha256:b4bb4d3f5e232425e25dda21c070ce05168a786ac9eda43768ab7f3ac2770955", - "sha256:cd0618c5ba5bda5f4039b9398bb7fb6a317bb8298218c3de25c47c4740e4b95e", - "sha256:ceacb9e5f8474dcf45b940578591c7f3d960e82f926c707788a570b51ba59190", - "sha256:fe6a88094b64132c4bb3b631412e90032e8cfe9745a58370462240b8cb7553cd" + "sha256:0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9", + "sha256:01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4", + "sha256:5124373960b0b3f4aa7df1707e63e9f109b5263eca5976c66e08b1c552d4eaf8", + "sha256:5ca4f10adbddae56d824b2c09668e91219bb178a1eee1faa56af6f99f11bf696", + "sha256:7907be34ffa3c5a32b60b95f4d95ea25361c951383a894fec31be7252b2b6f34", + 
"sha256:7ec9b2a4ed5cad025c2278a1e6a19c011c80a3caaac804fd2d329e9cc2c287c9", + "sha256:87ae4c829bb25b9fe99cf71fbb2140c448f534e24c998cc60f39ae4f94396a73", + "sha256:9de9919becc9cc2ff03637872a440195ac4241c80536632fffeb6a1e25a74299", + "sha256:a5a85b10e450c66b49f98846937e8cfca1db3127a9d5d1e31ca45c3d0bef4c5b", + "sha256:b0997827b4f6a7c286c01c5f60384d218dca4ed7d9efa945c3e1aa623d5709ae", + "sha256:b631ef96d3222e62861443cc89d6563ba3eeb816eeb96b2629345ab795e53681", + "sha256:bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41", + "sha256:f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8" ], - "version": "==5.1.1" + "version": "==5.1.2" + }, + "s3transfer": { + "hashes": [ + "sha256:6efc926738a3cd576c2a79725fed9afde92378aa5c6a957e3af010cb019fac9d", + "sha256:b780f2411b824cb541dbcd2c713d0cb61c7d1bcadae204cdddda2b35cef493ba" + ], + "version": "==0.2.1" }, "six": { "hashes": [ @@ -225,6 +252,14 @@ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" ], "version": "==1.12.0" + }, + "urllib3": { + "hashes": [ + "sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398", + "sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86" + ], + "markers": "python_version >= '3.4'", + "version": "==1.25.6" } }, "develop": { @@ -250,17 +285,17 @@ }, "arrow": { "hashes": [ - "sha256:03404b624e89ac5e4fc19c52045fa0f3203419fd4dd64f6e8958c522580a574a", - "sha256:41be7ea4c53c2cf57bf30f2d614f60c411160133f7a0a8c49111c30fb7e725b5" + "sha256:10257c5daba1a88db34afa284823382f4963feca7733b9107956bed041aff24f", + "sha256:c2325911fcd79972cf493cfd957072f9644af8ad25456201ae1ede3316576eb4" ], - "version": "==0.14.2" + "version": "==0.15.2" }, "asn1crypto": { "hashes": [ - "sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87", - "sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49" + "sha256:d02bf8ea1b964a5ff04ac7891fe3a39150045d1e5e4fe99273ba677d11b92a04", + 
"sha256:f822954b90c4c44f002e2cd46d636ab630f1fe4df22c816a82b66505c404eb2a" ], - "version": "==0.24.0" + "version": "==1.0.0" }, "atomicwrites": { "hashes": [ @@ -271,31 +306,16 @@ }, "attrs": { "hashes": [ - "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79", - "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399" + "sha256:ec20e7a4825331c1b5ebf261d111e16fa9612c1f7a5e1f884f12bd53a664dfd2", + "sha256:f913492e1663d3c36f502e5e9ba6cd13cf19d7fab50aa13239e420fef95e1396" ], - "version": "==19.1.0" + "version": "==19.2.0" }, "aws-sam-translator": { "hashes": [ - "sha256:db872c43bdfbbae9fc8c9201e6a7aeb9a661cda116a94708ab0577b46a38b962" + "sha256:3c615bff465fcf6a7990b9f84d002d55c75cd3e52d98e727d24959756ab0f0b1" ], - "version": "==1.11.0" - }, - "backports.functools-lru-cache": { - "hashes": [ - "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a", - "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd" - ], - "markers": "python_version == '2.7'", - "version": "==1.5" - }, - "backports.ssl-match-hostname": { - "hashes": [ - "sha256:bb82e60f9fbf4c080eabd957c39f0641f0fc247d9a16e31e26d594d8f42b9fd2" - ], - "markers": "python_version < '3.5'", - "version": "==3.7.0.1" + "version": "==1.14.0" }, "binaryornot": { "hashes": [ @@ -306,17 +326,19 @@ }, "boto3": { "hashes": [ - "sha256:794a9a4b6a9e40c1ac57a377de609872d28d62afe4295c48cdc1b1c92f96ab8e", - "sha256:962b078568cc520869ea2842f307864c9abc30ad5ed160e12b2a89debf220161" + "sha256:60e711f1113be926bcec1cfe62fa336438d021ce834f4a5228beead3b4bc5142", + "sha256:8c9b9b2422c1baa84c0f331ee86ac4d265e1e7d321ce7ba58dbb863585c2191f" ], - "version": "==1.9.168" + "index": "pypi", + "version": "==1.9.241" }, "botocore": { "hashes": [ - "sha256:675f2b66af486dd02f5825601bb0c8378773999f8705c6f75450849ca41fed80", - "sha256:c3fc314c0e0aa13aa024d272d991e23d37550050abf96b3c7dea889ed1743723" + "sha256:897415ec68b2cbb65a7d32965c456d332bb2eb936e533c9ad6064cd15e67c0c1", + 
"sha256:e35c2e6b8946be9063d7988b19dea2b6136b80c0e3469b6a076c574d5abca6b3" ], - "version": "==1.12.168" + "index": "pypi", + "version": "==1.12.241" }, "cerberus": { "hashes": [ @@ -326,10 +348,10 @@ }, "certifi": { "hashes": [ - "sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5", - "sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae" + "sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50", + "sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef" ], - "version": "==2019.3.9" + "version": "==2019.9.11" }, "cffi": { "hashes": [ @@ -366,10 +388,10 @@ }, "cfn-lint": { "hashes": [ - "sha256:16500272b5e2a3e9eb94e6b42c0a652b1a084fa96f8c5efb07ff4adde3b448ec", - "sha256:ce4bf8c0e6d5b8ad3f1b4cd8261e1eca795d61fb3723e3dce85c78eff95ab120" + "sha256:32a3e1597c681c9411205bff48b421db60908c304c472f4644d5a32bc9ecdad3", + "sha256:623cf0f6ed4c7b3fb4563549e25ac68119478900d89ca976639f11c5d85063a6" ], - "version": "==0.21.5" + "version": "==0.24.3" }, "chardet": { "hashes": [ @@ -398,22 +420,6 @@ ], "version": "==0.3.9" }, - "configparser": { - "hashes": [ - "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32", - "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75" - ], - "markers": "python_version < '3.2'", - "version": "==3.7.4" - }, - "contextlib2": { - "hashes": [ - "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48", - "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00" - ], - "markers": "python_version < '3'", - "version": "==0.5.5" - }, "cookiecutter": { "hashes": [ "sha256:1316a52e1c1f08db0c9efbf7d876dbc01463a74b155a0d83e722be88beda9a3e", @@ -452,11 +458,11 @@ }, "docutils": { "hashes": [ - "sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6", - "sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274", - "sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6" 
+ "sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0", + "sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827", + "sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99" ], - "version": "==0.14" + "version": "==0.15.2" }, "entrypoints": { "hashes": [ @@ -465,16 +471,6 @@ ], "version": "==0.3" }, - "enum34": { - "hashes": [ - "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850", - "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a", - "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79", - "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1" - ], - "markers": "python_version < '3'", - "version": "==1.1.6" - }, "fasteners": { "hashes": [ "sha256:007e4d2b2d4a10093f67e932e5166722d2eab83b77724156e92ad013c6226574", @@ -484,26 +480,10 @@ }, "flake8": { "hashes": [ - "sha256:859996073f341f2670741b51ec1e67a01da142831aa1fdc6242dbf88dffbe661", - "sha256:a796a115208f5c03b18f332f7c11729812c8c3ded6c46319c59b53efd3819da8" + "sha256:19241c1cbc971b9962473e4438a2ca19749a7dd002dd1a946eaba171b4114548", + "sha256:8e9dfa3cecb2400b3738a42c54c3043e821682b9c840b0448c0503f781130696" ], - "version": "==3.7.7" - }, - "funcsigs": { - "hashes": [ - "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", - "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" - ], - "markers": "python_version < '3.0'", - "version": "==1.0.2" - }, - "functools32": { - "hashes": [ - "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", - "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" - ], - "markers": "python_version == '2.7'", - "version": "==3.2.3.post2" + "version": "==3.7.8" }, "future": { "hashes": [ @@ -511,14 +491,6 @@ ], "version": "==0.17.1" }, - "futures": { - "hashes": [ - "sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265", - 
"sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1" - ], - "markers": "python_version == '2.6' or python_version == '2.7'", - "version": "==3.2.0" - }, "git-url-parse": { "hashes": [ "sha256:4655ee22f1d8bf7a1eb1066c1da16529b186966c6d8331f7f55686a76a9f7aef", @@ -536,18 +508,11 @@ }, "importlib-metadata": { "hashes": [ - "sha256:6dfd58dfe281e8d240937776065dd3624ad5469c835248219bd16cf2e12dbeb7", - "sha256:cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db" + "sha256:aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26", + "sha256:d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af" ], - "version": "==0.18" - }, - "ipaddress": { - "hashes": [ - "sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794", - "sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c" - ], - "markers": "python_version < '3'", - "version": "==1.0.22" + "markers": "python_version < '3.8'", + "version": "==0.23" }, "jinja2": { "hashes": [ @@ -572,10 +537,10 @@ }, "jsonpatch": { "hashes": [ - "sha256:49f29cab70e9068db3b1dc6b656cbe2ee4edf7dfe9bf5a0055f17a4b6804a4b9", - "sha256:8bf92fa26bc42c346c03bd4517722a8e4f429225dbe775ac774b2c70d95dbd33" + "sha256:83f29a2978c13da29bfdf89da9d65542d62576479caf215df19632d7dc04c6e6", + "sha256:cbb72f8bf35260628aea6b508a107245f757d1ec839a19c34349985e2c05645a" ], - "version": "==1.23" + "version": "==1.24" }, "jsonpointer": { "hashes": [ @@ -586,10 +551,10 @@ }, "jsonschema": { "hashes": [ - "sha256:000e68abd33c972a5248544925a0cae7d1125f9bf6c58280d37546b946769a08", - "sha256:6ff5f3180870836cae40f06fa10419f557208175f13ad7bc26caa77beb1f6e02" + "sha256:5f9c0a719ca2ce14c5de2fd350a64fd2d13e8539db29836a86adc990bb1a068f", + "sha256:8d4a2b7b6c2237e0199c8ea1a6d3e05bf118e289ae2b9d7ba444182a2959560d" ], - "version": "==2.6.0" + "version": "==3.0.2" }, "markupsafe": { "hashes": [ @@ -655,26 +620,17 @@ }, "more-itertools": { "hashes": [ - 
"sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4", - "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", - "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9" + "sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832", + "sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4" ], - "markers": "python_version <= '2.7'", - "version": "==5.0.0" + "version": "==7.2.0" }, "packaging": { "hashes": [ - "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af", - "sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3" + "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47", + "sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108" ], - "version": "==19.0" - }, - "pathlib2": { - "hashes": [ - "sha256:25199318e8cc3c25dcb45cbe084cc061051336d5a9ea2a12448d3d8cb748f742", - "sha256:5887121d7f7df3603bca2f710e7219f3eca0eb69e0b7cc6e0a022e155ac931a7" - ], - "version": "==2.3.3" + "version": "==19.2" }, "pathspec": { "hashes": [ @@ -698,17 +654,17 @@ }, "pluggy": { "hashes": [ - "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc", - "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c" + "sha256:0db4b7601aae1d35b4a033282da476845aa19185c1e6964b25cf324b5e4ec3e6", + "sha256:fa5fa1622fa6dd5c030e9cad086fa19ef6a0cf6d7a2d12318e10cb49d6d68f34" ], - "version": "==0.12.0" + "version": "==0.13.0" }, "poyo": { "hashes": [ - "sha256:c34a5413191210ed564640510e9c4a4ba3b698746d6b454d46eb5bfb30edcd1d", - "sha256:d1c317054145a6b1ca0608b5e676b943ddc3bfd671f886a2fe09288b98221edb" + "sha256:3e2ca8e33fdc3c411cd101ca395668395dd5dc7ac775b8e809e3def9f9fe041a", + "sha256:e26956aa780c45f011ca9886f044590e2d8fd8b61db7b1c1cf4e0869f48ed4dd" ], - "version": "==0.4.2" + "version": "==0.5.0" }, "psutil": { "hashes": [ @@ -753,13 +709,6 @@ ], "version": "==2.19" }, - "pyfiglet": { - "hashes": [ - 
"sha256:c6c2321755d09267b438ec7b936825a4910fec696292139e664ca8670e103639", - "sha256:d555bcea17fbeaf70eaefa48bb119352487e629c9b56f30f383e2c62dd67a01c" - ], - "version": "==0.8.post1" - }, "pyflakes": { "hashes": [ "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0", @@ -769,17 +718,23 @@ }, "pyparsing": { "hashes": [ - "sha256:1873c03321fc118f4e9746baf201ff990ceb915f433f23b395f5580d1840cb2a", - "sha256:9b6323ef4ab914af344ba97510e966d64ba91055d6b9afa6b30799340e89cc03" + "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80", + "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4" ], - "version": "==2.4.0" + "version": "==2.4.2" + }, + "pyrsistent": { + "hashes": [ + "sha256:34b47fa169d6006b32e99d4b3c4031f155e6e68ebcc107d6454852e8e0ee6533" + ], + "version": "==0.15.4" }, "pytest": { "hashes": [ - "sha256:4a784f1d4f2ef198fe9b7aef793e9fa1a3b2f84e822d9b3a64a181293a572d45", - "sha256:926855726d8ae8371803f7b2e6ec0a69953d9c6311fa7c3b6c1b929ff92d27da" + "sha256:13c1c9b22127a77fc684eee24791efafcef343335d855e3573791c68588fe1a5", + "sha256:d8ba7be9466f55ef96ba203fc0f90d0cf212f2f927e69186e1353e30bc7f62e5" ], - "version": "==4.6.3" + "version": "==5.2.0" }, "python-dateutil": { "hashes": [ @@ -799,19 +754,21 @@ }, "pyyaml": { "hashes": [ - "sha256:57acc1d8533cbe51f6662a55434f0dbecfa2b9eaf115bede8f6fd00115a0c0d3", - "sha256:588c94b3d16b76cfed8e0be54932e5729cc185caffaa5a451e7ad2f7ed8b4043", - "sha256:68c8dd247f29f9a0d09375c9c6b8fdc64b60810ebf07ba4cdd64ceee3a58c7b7", - "sha256:70d9818f1c9cd5c48bb87804f2efc8692f1023dac7f1a1a5c61d454043c1d265", - "sha256:86a93cccd50f8c125286e637328ff4eef108400dd7089b46a7be3445eecfa391", - "sha256:a0f329125a926876f647c9fa0ef32801587a12328b4a3c741270464e3e4fa778", - "sha256:a3c252ab0fa1bb0d5a3f6449a4826732f3eb6c0270925548cac342bc9b22c225", - "sha256:b4bb4d3f5e232425e25dda21c070ce05168a786ac9eda43768ab7f3ac2770955", - "sha256:cd0618c5ba5bda5f4039b9398bb7fb6a317bb8298218c3de25c47c4740e4b95e", - 
"sha256:ceacb9e5f8474dcf45b940578591c7f3d960e82f926c707788a570b51ba59190", - "sha256:fe6a88094b64132c4bb3b631412e90032e8cfe9745a58370462240b8cb7553cd" + "sha256:0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9", + "sha256:01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4", + "sha256:5124373960b0b3f4aa7df1707e63e9f109b5263eca5976c66e08b1c552d4eaf8", + "sha256:5ca4f10adbddae56d824b2c09668e91219bb178a1eee1faa56af6f99f11bf696", + "sha256:7907be34ffa3c5a32b60b95f4d95ea25361c951383a894fec31be7252b2b6f34", + "sha256:7ec9b2a4ed5cad025c2278a1e6a19c011c80a3caaac804fd2d329e9cc2c287c9", + "sha256:87ae4c829bb25b9fe99cf71fbb2140c448f534e24c998cc60f39ae4f94396a73", + "sha256:9de9919becc9cc2ff03637872a440195ac4241c80536632fffeb6a1e25a74299", + "sha256:a5a85b10e450c66b49f98846937e8cfca1db3127a9d5d1e31ca45c3d0bef4c5b", + "sha256:b0997827b4f6a7c286c01c5f60384d218dca4ed7d9efa945c3e1aa623d5709ae", + "sha256:b631ef96d3222e62861443cc89d6563ba3eeb816eeb96b2629345ab795e53681", + "sha256:bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41", + "sha256:f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8" ], - "version": "==5.1.1" + "version": "==5.1.2" }, "requests": { "hashes": [ @@ -820,40 +777,36 @@ ], "version": "==2.22.0" }, - "ruamel.ordereddict": { - "hashes": [ - "sha256:08b4b19fe518d32251a5338e039c4dc9eb0876f2919f94c9b8d2f9446ea80806", - "sha256:150ce8e6c514a2a2b62753622a75874962561f8e5eeec81a3172ab952807bf0b", - "sha256:45541836cbfdde630033cae7bbbe35acbac87a0ceec79f944b7a3bedd940fe78", - "sha256:854dd4a524811b16111b1107d8a751e4ca064d2bb103d3d91deab75de36b6620", - "sha256:aee2fa23e884249b4284b728888c553d551e5bfd4de2731f10153fd7813ec55f", - "sha256:bf0a198c8ce5d973c24e5dba12d3abc254996788ca6ad8448eabc6aa710db149" - ], - "markers": "platform_python_implementation == 'CPython' and python_version <= '2.7'", - "version": "==0.4.13" - }, "ruamel.yaml": { "hashes": [ - 
"sha256:17dbf6b7362e7aee8494f7a0f5cffd44902a6331fe89ef0853b855a7930ab845", - "sha256:23731c9efb79f3f5609dedffeb6c5c47a68125fd3d4b157d9fc71b1cd49076a9", - "sha256:2bbdd598ae57bac20968cf9028cc67d37d83bdb7942a94b9478110bc72193148", - "sha256:34586084cdd60845a3e1bece2b58f0a889be25450db8cc0ea143ddf0f40557a2", - "sha256:35957fedbb287b01313bb5c556ffdc70c0277c3500213b5e73dfd8716f748d77", - "sha256:414cb87a40974a575830b406ffab4ab8c6cbd82eeb73abd2a9d1397c1f0223e1", - "sha256:428775be75db68d908b17e4e8dda424c410222f170dc173246aa63e972d094b3", - "sha256:514f670f7d36519bda504d507edfe63e3c20489f86c86d42bc4d9a6dbdf82c7b", - "sha256:5cb962c1ac6887c5da29138fbbe3b4b7705372eb54e599907fa63d4cd743246d", - "sha256:5f6e30282cf70fb7754e1a5f101e27b5240009766376e131b31ab49f14fe81be", - "sha256:86f8e010af6af0b4f42de2d0d9b19cb441e61d3416082186f9dd03c8552d13ad", - "sha256:8d47ed1e557d546bd2dfe54f504d7274274602ff7a0652cde84c258ad6c2d96d", - "sha256:98668876720bce1ac08562d8b93a564a80e3397e442c7ea19cebdcdf73da7f74", - "sha256:9e1f0ddc18d8355dcf5586a5d90417df56074f237812b8682a93b62cca9d2043", - "sha256:a7bc812a72a79d6b7dbb96fa5bee3950464b65ec055d3abc4db6572f2373a95c", - "sha256:b72e13f9f206ee103247b07afd5a39c8b1aa98e8eba80ddba184d030337220ba", - "sha256:bcff8ea9d916789e85e24beed8830c157fb8bc7c313e554733a8151540e66c01", - "sha256:c76e78b3bab652069b8d6f7889b0e72f3455c2b854b2e0a8818393d149ad0a0d" + "sha256:0db639b1b2742dae666c6fc009b8d1931ef15c9276ef31c0673cc6dcf766cf40", + "sha256:412a6f5cfdc0525dee6a27c08f5415c7fd832a7afcb7a0ed7319628aed23d408" ], - "version": "==0.15.97" + "version": "==0.16.5" + }, + "ruamel.yaml.clib": { + "hashes": [ + "sha256:1e77424825caba5553bbade750cec2277ef130647d685c2b38f68bc03453bac6", + "sha256:392b7c371312abf27fb549ec2d5e0092f7ef6e6c9f767bfb13e83cb903aca0fd", + "sha256:4d55386129291b96483edcb93b381470f7cd69f97585829b048a3d758d31210a", + "sha256:550168c02d8de52ee58c3d8a8193d5a8a9491a5e7b2462d27ac5bf63717574c9", + 
"sha256:57933a6986a3036257ad7bf283529e7c19c2810ff24c86f4a0cfeb49d2099919", + "sha256:615b0396a7fad02d1f9a0dcf9f01202bf9caefee6265198f252c865f4227fcc6", + "sha256:77556a7aa190be9a2bd83b7ee075d3df5f3c5016d395613671487e79b082d784", + "sha256:7aee724e1ff424757b5bd8f6c5bbdb033a570b2b4683b17ace4dbe61a99a657b", + "sha256:8073c8b92b06b572e4057b583c3d01674ceaf32167801fe545a087d7a1e8bf52", + "sha256:9c6d040d0396c28d3eaaa6cb20152cb3b2f15adf35a0304f4f40a3cf9f1d2448", + "sha256:a0ff786d2a7dbe55f9544b3f6ebbcc495d7e730df92a08434604f6f470b899c5", + "sha256:b1b7fcee6aedcdc7e62c3a73f238b3d080c7ba6650cd808bce8d7761ec484070", + "sha256:b66832ea8077d9b3f6e311c4a53d06273db5dc2db6e8a908550f3c14d67e718c", + "sha256:d0d3ac228c9bbab08134b4004d748cf9f8743504875b3603b3afbb97e3472947", + "sha256:d10e9dd744cf85c219bf747c75194b624cc7a94f0c80ead624b06bfa9f61d3bc", + "sha256:ea4362548ee0cbc266949d8a441238d9ad3600ca9910c3fe4e82ee3a50706973", + "sha256:ed5b3698a2bb241b7f5cbbe277eaa7fe48b07a58784fba4f75224fd066d253ad", + "sha256:f9dcc1ae73f36e8059589b601e8e4776b9976effd76c21ad6a855a74318efd6e" + ], + "markers": "platform_python_implementation == 'CPython' and python_version < '3.8'", + "version": "==0.2.0" }, "s3transfer": { "hashes": [ @@ -862,23 +815,6 @@ ], "version": "==0.2.1" }, - "scandir": { - "hashes": [ - "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e", - "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022", - "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f", - "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f", - "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae", - "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173", - "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4", - "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32", - "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188", - 
"sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d", - "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac" - ], - "markers": "python_version < '3.5'", - "version": "==1.10.0" - }, "sh": { "hashes": [ "sha256:ae3258c5249493cebe73cb4e18253a41ed69262484bad36fdb3efcb8ad8870bb", @@ -901,10 +837,10 @@ }, "taskcat": { "hashes": [ - "sha256:af4149d6b951cbc4974e5a03f8eb8c137e1f81ac98a348715eb6dc287f728b2a" + "sha256:e84eb198c74ca677b589889d4e6877568e25858235d51cdd99a8128d525b63b2" ], "index": "pypi", - "version": "==0.8.35" + "version": "==0.8.47" }, "testinfra": { "hashes": [ @@ -920,22 +856,13 @@ ], "version": "==0.1.2" }, - "typing": { - "hashes": [ - "sha256:4027c5f6127a6267a435201981ba156de91ad0d1d98e9ddc2aa173453453492d", - "sha256:57dcf675a99b74d64dacf6fba08fb17cf7e3d5fdff53d4a30ea2a5e7e52543d4", - "sha256:a4c8473ce11a65999c8f59cb093e70686b6c84c98df58c1dae9b3b196089858a" - ], - "markers": "python_version < '3.5'", - "version": "==3.6.6" - }, "urllib3": { "hashes": [ - "sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1", - "sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232" + "sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398", + "sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86" ], - "markers": "python_version == '2.7'", - "version": "==1.25.3" + "markers": "python_version >= '3.4'", + "version": "==1.25.6" }, "wcwidth": { "hashes": [ @@ -953,30 +880,30 @@ }, "whichcraft": { "hashes": [ - "sha256:7533870f751901a0ce43c93cc9850186e9eba7fe58c924dfb435968ba9c9fa4e", - "sha256:fecddd531f237ffc5db8b215409afb18fa30300699064cca4817521b4fc81815" + "sha256:acdbb91b63d6a15efbd6430d1d7b2d36e44a71697e93e19b7ded477afd9fce87", + "sha256:deda9266fbb22b8c64fd3ee45c050d61139cd87419765f588e37c8d23e236dd9" ], - "version": "==0.5.2" + "version": "==0.6.1" }, "yamllint": { "hashes": [ - 
"sha256:9a4fec2d40804979de5f54453fd1551bc1f8b59a7ad4a26fd7f26aeca34a83af", - "sha256:f97cd763fe7b588444a94cc44fd3764b832a613b5250baa2bfe8b84c91e4c330" + "sha256:67173339f28868260ce5912abfefa10e115ceb1d2ac1c4d8c7acc8c4ef6c9a8a", + "sha256:70a6f8316851254e197a6231c35577be29fa2fbe2c77390a54c9a50217cdaa13" ], - "version": "==1.16.0" + "version": "==1.17.0" }, "yattag": { "hashes": [ - "sha256:d7214d100315093e3ddc34da9840acbfa65c79ec84b48a8191ddf535353c2e3f" + "sha256:47d1c842e0da596bac081fcc047f2d6fd778b16d20745a28c00ce99d80831fbc" ], - "version": "==1.11.2" + "version": "==1.12.2" }, "zipp": { "hashes": [ - "sha256:8c1019c6aad13642199fbe458275ad6a84907634cc9f0989877ccc4a2840139d", - "sha256:ca943a7e809cc12257001ccfb99e3563da9af99d52f261725e96dfe0f9275bc3" + "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", + "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335" ], - "version": "==0.5.1" + "version": "==0.6.0" } } } diff --git a/bin/install-ansible b/bin/install-ansible index a6da6bf..8a32ac5 100755 --- a/bin/install-ansible +++ b/bin/install-ansible @@ -10,7 +10,14 @@ set -e # Luckily AmazonLinux2 and Ubuntu use the same package name for # pip. This may need some logic if other distros are added. Note: # Parsing /etc/os-release is probably a good starting point for that. -./bin/pacapt install --noconfirm python-pip +# +# Additionally we need to install boto3 and botocore, as the Ansible +# AWS modules manage to escape the virtualenv and invoke the native python. + +./bin/pacapt install --noconfirm \ + python-pip \ + python-boto3 \ + python-botocore export PATH=$PATH:/usr/local/bin # See Pipfile and Pipfile.lock. From f5ebeaedb0836c6737f5f5dbc258284fae0b7768 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Thu, 3 Oct 2019 14:23:05 +1000 Subject: [PATCH 12/93] DCD-686: Amazon Linux 2 has updated its Ansible packages so we can remove pipenv from the installation phase. 
--- Pipfile | 2 +- Pipfile.lock | 62 ++++------------------------------------ bin/ansible-with-atl-env | 11 ++++--- bin/install-ansible | 23 ++++----------- 4 files changed, 17 insertions(+), 81 deletions(-) diff --git a/Pipfile b/Pipfile index 4d8129d..55724ff 100644 --- a/Pipfile +++ b/Pipfile @@ -4,7 +4,7 @@ verify_ssl = true name = "pypi" [packages] -ansible = "==2.7.11" +ansible = "==2.8.2" boto3 = "==1.9.241" botocore = "==1.12.241" diff --git a/Pipfile.lock b/Pipfile.lock index 976eed7..4c43753 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "fe304fca8752522c4a630677978735c57d39f161e7d0046ea128a21c7c28e373" + "sha256": "8f8923741e447b125ad9cb5c3912ba86e2239e753c7211ce1f074097892e1b6f" }, "pipfile-spec": 6, "requires": { @@ -18,10 +18,10 @@ "default": { "ansible": { "hashes": [ - "sha256:e7e6de461b7d07cb4d8b2dd2a32b231af7c56e6bf39b851024671aaa52fd377e" + "sha256:1e5ba829ca0602c55b33da399b06f99b135a34014b661d1c36d8892a1e2d3730" ], "index": "pypi", - "version": "==2.7.11" + "version": "==2.8.2" }, "asn1crypto": { "hashes": [ @@ -30,27 +30,6 @@ ], "version": "==1.0.0" }, - "bcrypt": { - "hashes": [ - "sha256:0258f143f3de96b7c14f762c770f5fc56ccd72f8a1857a451c1cd9a655d9ac89", - "sha256:0b0069c752ec14172c5f78208f1863d7ad6755a6fae6fe76ec2c80d13be41e42", - "sha256:19a4b72a6ae5bb467fea018b825f0a7d917789bcfe893e53f15c92805d187294", - "sha256:5432dd7b34107ae8ed6c10a71b4397f1c853bd39a4d6ffa7e35f40584cffd161", - "sha256:69361315039878c0680be456640f8705d76cb4a3a3fe1e057e0f261b74be4b31", - "sha256:6fe49a60b25b584e2f4ef175b29d3a83ba63b3a4df1b4c0605b826668d1b6be5", - "sha256:74a015102e877d0ccd02cdeaa18b32aa7273746914a6c5d0456dd442cb65b99c", - "sha256:763669a367869786bb4c8fcf731f4175775a5b43f070f50f46f0b59da45375d0", - "sha256:8b10acde4e1919d6015e1df86d4c217d3b5b01bb7744c36113ea43d529e1c3de", - "sha256:9fe92406c857409b70a38729dbdf6578caf9228de0aef5bc44f859ffe971a39e", - 
"sha256:a190f2a5dbbdbff4b74e3103cef44344bc30e61255beb27310e2aec407766052", - "sha256:a595c12c618119255c90deb4b046e1ca3bcfad64667c43d1166f2b04bc72db09", - "sha256:c9457fa5c121e94a58d6505cadca8bed1c64444b83b3204928a866ca2e599105", - "sha256:cb93f6b2ab0f6853550b74e051d297c27a638719753eb9ff66d1e4072be67133", - "sha256:d7bdc26475679dd073ba0ed2766445bb5b20ca4793ca0db32b399dccc6bc84b7", - "sha256:ff032765bb8716d9387fd5376d987a937254b0619eff0972779515b5c98820bc" - ], - "version": "==3.1.7" - }, "boto3": { "hashes": [ "sha256:60e711f1113be926bcec1cfe62fa336438d021ce834f4a5228beead3b4bc5142", @@ -176,43 +155,12 @@ ], "version": "==1.1.1" }, - "paramiko": { - "hashes": [ - "sha256:99f0179bdc176281d21961a003ffdb2ec369daac1a1007241f53374e376576cf", - "sha256:f4b2edfa0d226b70bd4ca31ea7e389325990283da23465d572ed1f70a7583041" - ], - "version": "==2.6.0" - }, "pycparser": { "hashes": [ "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3" ], "version": "==2.19" }, - "pynacl": { - "hashes": [ - "sha256:05c26f93964373fc0abe332676cb6735f0ecad27711035b9472751faa8521255", - "sha256:0c6100edd16fefd1557da078c7a31e7b7d7a52ce39fdca2bec29d4f7b6e7600c", - "sha256:0d0a8171a68edf51add1e73d2159c4bc19fc0718e79dec51166e940856c2f28e", - "sha256:1c780712b206317a746ace34c209b8c29dbfd841dfbc02aa27f2084dd3db77ae", - "sha256:2424c8b9f41aa65bbdbd7a64e73a7450ebb4aa9ddedc6a081e7afcc4c97f7621", - "sha256:2d23c04e8d709444220557ae48ed01f3f1086439f12dbf11976e849a4926db56", - "sha256:30f36a9c70450c7878053fa1344aca0145fd47d845270b43a7ee9192a051bf39", - "sha256:37aa336a317209f1bb099ad177fef0da45be36a2aa664507c5d72015f956c310", - "sha256:4943decfc5b905748f0756fdd99d4f9498d7064815c4cf3643820c9028b711d1", - "sha256:57ef38a65056e7800859e5ba9e6091053cd06e1038983016effaffe0efcd594a", - "sha256:5bd61e9b44c543016ce1f6aef48606280e45f892a928ca7068fba30021e9b786", - "sha256:6482d3017a0c0327a49dddc8bd1074cc730d45db2ccb09c3bac1f8f32d1eb61b", - 
"sha256:7d3ce02c0784b7cbcc771a2da6ea51f87e8716004512493a2b69016326301c3b", - "sha256:a14e499c0f5955dcc3991f785f3f8e2130ed504fa3a7f44009ff458ad6bdd17f", - "sha256:a39f54ccbcd2757d1d63b0ec00a00980c0b382c62865b61a505163943624ab20", - "sha256:aabb0c5232910a20eec8563503c153a8e78bbf5459490c49ab31f6adf3f3a415", - "sha256:bd4ecb473a96ad0f90c20acba4f0bf0df91a4e03a1f4dd6a4bdc9ca75aa3a715", - "sha256:e2da3c13307eac601f3de04887624939aca8ee3c9488a0bb0eca4fb9401fc6b1", - "sha256:f67814c38162f4deb31f68d590771a29d5ae3b1bd64b75cf232308e5c74777e0" - ], - "version": "==1.3.0" - }, "python-dateutil": { "hashes": [ "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb", @@ -265,10 +213,10 @@ "develop": { "ansible": { "hashes": [ - "sha256:e7e6de461b7d07cb4d8b2dd2a32b231af7c56e6bf39b851024671aaa52fd377e" + "sha256:1e5ba829ca0602c55b33da399b06f99b135a34014b661d1c36d8892a1e2d3730" ], "index": "pypi", - "version": "==2.7.11" + "version": "==2.8.2" }, "ansible-lint": { "hashes": [ diff --git a/bin/ansible-with-atl-env b/bin/ansible-with-atl-env index 29d5fee..580b4d4 100755 --- a/bin/ansible-with-atl-env +++ b/bin/ansible-with-atl-env @@ -14,9 +14,8 @@ source $ENV_FILE set +a # Use Ansible from virtualenv if provided -pipenv run \ - ansible-playbook -v \ - $ATL_DEPLOYMENT_REPOSITORY_CUSTOM_PARAMS \ - -i $INV \ - $PLAYBOOK \ - 2>&1 | tee --append $LOG_FILE +ansible-playbook -v \ + $ATL_DEPLOYMENT_REPOSITORY_CUSTOM_PARAMS \ + -i $INV \ + $PLAYBOOK \ + 2>&1 | tee --append $LOG_FILE diff --git a/bin/install-ansible b/bin/install-ansible index 8a32ac5..ac95ed8 100755 --- a/bin/install-ansible +++ b/bin/install-ansible @@ -2,24 +2,13 @@ set -e -# The Amazon Linux 2 Ansible package is 2.4, which has issue -# interacting with RDS, so use pipenv to install a known-good version. -# Another alternative here would be nix, however that has issues -# installing as root, and can be slow in practice. - -# Luckily AmazonLinux2 and Ubuntu use the same package name for -# pip. 
This may need some logic if other distros are added. Note: -# Parsing /etc/os-release is probably a good starting point for that. -# -# Additionally we need to install boto3 and botocore, as the Ansible -# AWS modules manage to escape the virtualenv and invoke the native python. +# Amazon Linux 2 packages Ansible separately, so enable the repo +. /etc/os-release +if [[ $ID == 'amzn' ]]; then + amazon-linux-extras enable ansible2 +fi ./bin/pacapt install --noconfirm \ - python-pip \ + ansible \ python-boto3 \ python-botocore -export PATH=$PATH:/usr/local/bin - -# See Pipfile and Pipfile.lock. -pip install pipenv -pipenv sync From 0c5f82c438608cdffa7ab96366c5a3c359fc08a8 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Thu, 3 Oct 2019 14:57:20 +1000 Subject: [PATCH 13/93] DCD-686: Move git version to where it is used. --- roles/linux_common/defaults/main.yml | 1 + roles/product_common/defaults/main.yml | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/linux_common/defaults/main.yml b/roles/linux_common/defaults/main.yml index 561baf2..cb751b1 100644 --- a/roles/linux_common/defaults/main.yml +++ b/roles/linux_common/defaults/main.yml @@ -1,3 +1,4 @@ --- atl_product_user_uid: '2001' +git_version: "2.14.4" diff --git a/roles/product_common/defaults/main.yml b/roles/product_common/defaults/main.yml index cb807b0..ce62ba4 100644 --- a/roles/product_common/defaults/main.yml +++ b/roles/product_common/defaults/main.yml @@ -2,7 +2,6 @@ java_version: "1.8.0" java_major_version: "8" postgres_version: "9.6" -git_version: "2.14.4" # Disable these when using the product installer, otherwise we end up # fighting with it. From 2a0a1814c8facbdc791ab4aed9b8a2cc563efac8 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Thu, 3 Oct 2019 14:58:14 +1000 Subject: [PATCH 14/93] DCD-686: Remove molecule as the manifest fetch isn't really testable. 
--- .../molecule/default/Dockerfile.j2 | 14 ------------- .../molecule/default/molecule.yml | 20 ------------------- .../molecule/default/playbook.yml | 12 ----------- .../molecule/default/tests/test_default.py | 19 ------------------ 4 files changed, 65 deletions(-) delete mode 100644 roles/restore_metadata/molecule/default/Dockerfile.j2 delete mode 100644 roles/restore_metadata/molecule/default/molecule.yml delete mode 100644 roles/restore_metadata/molecule/default/playbook.yml delete mode 100644 roles/restore_metadata/molecule/default/tests/test_default.py diff --git a/roles/restore_metadata/molecule/default/Dockerfile.j2 b/roles/restore_metadata/molecule/default/Dockerfile.j2 deleted file mode 100644 index e6aa95d..0000000 --- a/roles/restore_metadata/molecule/default/Dockerfile.j2 +++ /dev/null @@ -1,14 +0,0 @@ -# Molecule managed - -{% if item.registry is defined %} -FROM {{ item.registry.url }}/{{ item.image }} -{% else %} -FROM {{ item.image }} -{% endif %} - -RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \ - elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \ - elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \ - elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \ - elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \ - elif [ $(command -v xbps-install) ]; then xbps-install -Syu && xbps-install -y python sudo bash ca-certificates && xbps-remove -O; fi diff --git a/roles/restore_metadata/molecule/default/molecule.yml b/roles/restore_metadata/molecule/default/molecule.yml deleted file mode 100644 index c6a6d26..0000000 --- 
a/roles/restore_metadata/molecule/default/molecule.yml +++ /dev/null @@ -1,20 +0,0 @@ ---- -dependency: - name: galaxy -driver: - name: docker -lint: - name: yamllint -platforms: - - name: amazon_linux2 - image: amazonlinux:2 - - name: ubuntu_lts - image: ubuntu:bionic -provisioner: - name: ansible - lint: - name: ansible-lint -verifier: - name: testinfra - lint: - name: flake8 diff --git a/roles/restore_metadata/molecule/default/playbook.yml b/roles/restore_metadata/molecule/default/playbook.yml deleted file mode 100644 index 6e24bad..0000000 --- a/roles/restore_metadata/molecule/default/playbook.yml +++ /dev/null @@ -1,12 +0,0 @@ ---- -- name: Converge - hosts: all - vars: - atl_product_user: "testuser" - atl_product_home: "/opt/atlassian/product" - atl_product_installation_base: "/opt/atlassian/product/install" - atl_installer_temp: "/opt/atlassian/temp" - atl_product_home_shared: "/media/atl/jira/shared" - atl_product_shared_plugins: "/media/atl/jira/shared/plugins/" - roles: - - role: restore_metadata diff --git a/roles/restore_metadata/molecule/default/tests/test_default.py b/roles/restore_metadata/molecule/default/tests/test_default.py deleted file mode 100644 index b29ef83..0000000 --- a/roles/restore_metadata/molecule/default/tests/test_default.py +++ /dev/null @@ -1,19 +0,0 @@ -import os -import pytest - -import testinfra.utils.ansible_runner - -testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( - os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all') - - -def test_user_created(host): - user = host.user('testuser') - assert user.exists - - -@pytest.mark.parametrize('exe', [ - '/usr/bin/git' -]) -def test_package_exes(host, exe): - assert host.file(exe).exists From 3dca4691778840b24b6e20e811833a5e3dc440e1 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Thu, 3 Oct 2019 16:11:58 +1000 Subject: [PATCH 15/93] DCD-686: Load the downloaded manifest into a var. 
--- roles/restore_metadata/tasks/main.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/roles/restore_metadata/tasks/main.yml b/roles/restore_metadata/tasks/main.yml index 5264854..477aa8d 100644 --- a/roles/restore_metadata/tasks/main.yml +++ b/roles/restore_metadata/tasks/main.yml @@ -35,4 +35,9 @@ dest: "{{ atl_backup_manifest_dest }}" when: atl_backup_manifest_url.scheme != 's3' + - name: Load parameters from manifest + include_vars: + file: "{{ atl_backup_manifest_dest }}" + name: atl_backup_manifest + when: atl_backup_manifest_url is defined and atl_backup_manifest_url != '' From 0f6bbe0f89149f78a912754e1ec7fb6cd186ae20 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Fri, 4 Oct 2019 08:19:33 +1000 Subject: [PATCH 16/93] DCD-686: Add comment describing role. --- roles/restore_metadata/tasks/main.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/roles/restore_metadata/tasks/main.yml b/roles/restore_metadata/tasks/main.yml index 477aa8d..8a62e9d 100644 --- a/roles/restore_metadata/tasks/main.yml +++ b/roles/restore_metadata/tasks/main.yml @@ -1,5 +1,9 @@ --- +# This role will attempt to fetch and load the backup manifest from a +# remote HTTP or S3 URL. On successful completion the contents of JSON +# or YAML document will be in the var `atl_backup_manifest`. + - block: - name: Ensure temp directory is present From 6a940a718354688ee3c94bb2ad8b538905ab825c Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Fri, 4 Oct 2019 09:04:50 +1000 Subject: [PATCH 17/93] DCD-686: Rename manifest fetching role to be more descriptive. 
--- roles/{restore_metadata => load_backup_manifest}/tasks/main.yml | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename roles/{restore_metadata => load_backup_manifest}/tasks/main.yml (100%) diff --git a/roles/restore_metadata/tasks/main.yml b/roles/load_backup_manifest/tasks/main.yml similarity index 100% rename from roles/restore_metadata/tasks/main.yml rename to roles/load_backup_manifest/tasks/main.yml From 5e85f71ae8580c011da703d38e15a04f85eed8c6 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Fri, 4 Oct 2019 11:39:43 +1000 Subject: [PATCH 18/93] DCD-686: Update pipelines config. --- bin/install-ansible | 9 ++ bitbucket-pipelines.yml | 86 +++++++------------ pipeline_generator/Makefile | 2 +- pipeline_generator/pipeline.py | 5 +- .../templates/bitbucket-pipelines.yml.j2 | 4 +- 5 files changed, 43 insertions(+), 63 deletions(-) diff --git a/bin/install-ansible b/bin/install-ansible index ac95ed8..f94539d 100755 --- a/bin/install-ansible +++ b/bin/install-ansible @@ -12,3 +12,12 @@ fi ansible \ python-boto3 \ python-botocore + +if [[ $1 == "--dev" ]]; then + ./bin/pacapt install --noconfirm \ + python-dev python-pip \ + python3-dev python3-pip + + pip install pipenv + pipenv sync --dev +fi diff --git a/bitbucket-pipelines.yml b/bitbucket-pipelines.yml index a63c197..7cccd33 100644 --- a/bitbucket-pipelines.yml +++ b/bitbucket-pipelines.yml @@ -1,6 +1,6 @@ --- -image: atlassian/default-image:2 +image: ubuntu:disco options: size: 2x @@ -32,8 +32,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 1 - step: @@ -41,8 +40,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 2 - step: @@ -50,8 +48,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get 
install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 3 - step: @@ -59,8 +56,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 4 - step: @@ -68,8 +64,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 5 - step: @@ -77,8 +72,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 6 - step: @@ -86,8 +80,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 7 - step: @@ -95,8 +88,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 8 - step: @@ -104,8 +96,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 9 - step: @@ -113,8 +104,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 10 - step: @@ -122,8 +112,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 11 - step: @@ -131,8 +120,7 @@ pipelines: services: - docker script: - - 
apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 12 - step: @@ -140,8 +128,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 13 - step: @@ -149,8 +136,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 14 - step: @@ -158,8 +144,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 15 - step: @@ -167,8 +152,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 16 - step: @@ -176,8 +160,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 17 - step: @@ -185,8 +168,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 18 - step: @@ -194,8 +176,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 19 - step: @@ -203,8 +184,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 20 - step: @@ -212,8 +192,7 
@@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 21 - step: @@ -221,8 +200,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 22 - step: @@ -230,8 +208,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 23 - step: @@ -239,8 +216,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 24 - step: @@ -248,8 +224,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 25 - step: @@ -257,8 +232,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 26 - step: @@ -266,8 +240,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 27 - step: @@ -275,8 +248,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 28 diff --git a/pipeline_generator/Makefile b/pipeline_generator/Makefile index 5b6e164..aed9280 100644 --- a/pipeline_generator/Makefile +++ b/pipeline_generator/Makefile @@ -1,2 +1,2 @@ generate-pipeline: - 
@python pipeline.py + @python3 pipeline.py diff --git a/pipeline_generator/pipeline.py b/pipeline_generator/pipeline.py index 62b1408..c225bbe 100644 --- a/pipeline_generator/pipeline.py +++ b/pipeline_generator/pipeline.py @@ -46,14 +46,13 @@ class Step: class ScriptCommand: - INSTALL_PACKAGES_COMMAND = "apt-get update && apt-get install -y virtualenv python-dev" - INSTALL_ANSIBLE_COMMAND = "./bin/install-ansible" + PACKAGE_INSTALL_COMMAND = "./bin/install-ansible --dev" def __init__(self, test_command): self.test_command = test_command def all_commands(self): - return [self.INSTALL_PACKAGES_COMMAND, self.INSTALL_ANSIBLE_COMMAND, self.test_command] + return [self.PACKAGE_INSTALL_COMMAND, self.test_command] def main(): diff --git a/pipeline_generator/templates/bitbucket-pipelines.yml.j2 b/pipeline_generator/templates/bitbucket-pipelines.yml.j2 index 9b7fe42..4432857 100644 --- a/pipeline_generator/templates/bitbucket-pipelines.yml.j2 +++ b/pipeline_generator/templates/bitbucket-pipelines.yml.j2 @@ -1,6 +1,6 @@ --- -image: atlassian/default-image:2 +image: ubuntu:disco options: size: 2x @@ -36,4 +36,4 @@ pipelines: {% for scriptCommand in parallel_step.scriptCommands -%} - {{ scriptCommand }} {% endfor %} - {% endfor %} \ No newline at end of file + {% endfor %} From b82ab6327a5e1c6aadc801324d39f01169822362 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Fri, 4 Oct 2019 11:43:19 +1000 Subject: [PATCH 19/93] DCD-686: Need to force update on Ubuntu. 
--- bitbucket-pipelines.yml | 56 +++++++++++++++++----------------- pipeline_generator/pipeline.py | 2 +- 2 files changed, 29 insertions(+), 29 deletions(-) diff --git a/bitbucket-pipelines.yml b/bitbucket-pipelines.yml index 7cccd33..83f6b71 100644 --- a/bitbucket-pipelines.yml +++ b/bitbucket-pipelines.yml @@ -32,7 +32,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 1 - step: @@ -40,7 +40,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 2 - step: @@ -48,7 +48,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 3 - step: @@ -56,7 +56,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 4 - step: @@ -64,7 +64,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 5 - step: @@ -72,7 +72,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 6 - step: @@ -80,7 +80,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 7 - step: @@ -88,7 +88,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 8 - step: @@ -96,7 +96,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 9 - step: @@ -104,7 +104,7 @@ 
pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 10 - step: @@ -112,7 +112,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 11 - step: @@ -120,7 +120,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 12 - step: @@ -128,7 +128,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 13 - step: @@ -136,7 +136,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 14 - step: @@ -144,7 +144,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 15 - step: @@ -152,7 +152,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 16 - step: @@ -160,7 +160,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 17 - step: @@ -168,7 +168,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 18 - step: @@ -176,7 +176,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 19 - step: @@ -184,7 +184,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev 
- ./bin/run-tests-in-batches --batch 20 - step: @@ -192,7 +192,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 21 - step: @@ -200,7 +200,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 22 - step: @@ -208,7 +208,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 23 - step: @@ -216,7 +216,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 24 - step: @@ -224,7 +224,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 25 - step: @@ -232,7 +232,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 26 - step: @@ -240,7 +240,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 27 - step: @@ -248,7 +248,7 @@ pipelines: services: - docker script: - - ./bin/install-ansible --dev + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 28 diff --git a/pipeline_generator/pipeline.py b/pipeline_generator/pipeline.py index c225bbe..fd9edbb 100644 --- a/pipeline_generator/pipeline.py +++ b/pipeline_generator/pipeline.py @@ -46,7 +46,7 @@ class Step: class ScriptCommand: - PACKAGE_INSTALL_COMMAND = "./bin/install-ansible --dev" + PACKAGE_INSTALL_COMMAND = "apt-get update && ./bin/install-ansible --dev" def __init__(self, test_command): self.test_command = test_command From 
ba25ab8f73bf3b191cd2775df326cfd34e1dd35f Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Fri, 4 Oct 2019 11:57:54 +1000 Subject: [PATCH 20/93] DCD-686: Update to Debian Buster for better compability with Amazon Linux 2. --- Pipfile | 4 +- Pipfile.lock | 212 ++++++++++++------ bin/install-ansible | 3 +- bitbucket-pipelines.yml | 2 +- .../templates/bitbucket-pipelines.yml.j2 | 2 +- 5 files changed, 150 insertions(+), 73 deletions(-) diff --git a/Pipfile b/Pipfile index 55724ff..e8b700a 100644 --- a/Pipfile +++ b/Pipfile @@ -5,8 +5,6 @@ name = "pypi" [packages] ansible = "==2.8.2" -boto3 = "==1.9.241" -botocore = "==1.12.241" [dev-packages] molecule = "==2.20.1" @@ -16,4 +14,4 @@ taskcat = "*" Jinja2 = "*" [requires] -python_version = "3.7" +python_version = "2.7" diff --git a/Pipfile.lock b/Pipfile.lock index 4c43753..0508d9e 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,11 +1,11 @@ { "_meta": { "hash": { - "sha256": "8f8923741e447b125ad9cb5c3912ba86e2239e753c7211ce1f074097892e1b6f" + "sha256": "32443872340b7d7f286060fb7ce8c930db8eb565aab1702fca75daaec230177a" }, "pipfile-spec": 6, "requires": { - "python_version": "3.7" + "python_version": "2.7" }, "sources": [ { @@ -30,22 +30,6 @@ ], "version": "==1.0.0" }, - "boto3": { - "hashes": [ - "sha256:60e711f1113be926bcec1cfe62fa336438d021ce834f4a5228beead3b4bc5142", - "sha256:8c9b9b2422c1baa84c0f331ee86ac4d265e1e7d321ce7ba58dbb863585c2191f" - ], - "index": "pypi", - "version": "==1.9.241" - }, - "botocore": { - "hashes": [ - "sha256:897415ec68b2cbb65a7d32965c456d332bb2eb936e533c9ad6064cd15e67c0c1", - "sha256:e35c2e6b8946be9063d7988b19dea2b6136b80c0e3469b6a076c574d5abca6b3" - ], - "index": "pypi", - "version": "==1.12.241" - }, "cffi": { "hashes": [ "sha256:041c81822e9f84b1d9c401182e174996f0bae9991f33725d059b771744290774", @@ -100,13 +84,23 @@ ], "version": "==2.7" }, - "docutils": { + "enum34": { "hashes": [ - "sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0", - 
"sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827", - "sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99" + "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850", + "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a", + "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79", + "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1" ], - "version": "==0.15.2" + "markers": "python_version < '3'", + "version": "==1.1.6" + }, + "ipaddress": { + "hashes": [ + "sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794", + "sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c" + ], + "markers": "python_version < '3'", + "version": "==1.0.22" }, "jinja2": { "hashes": [ @@ -115,13 +109,6 @@ ], "version": "==2.10.1" }, - "jmespath": { - "hashes": [ - "sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6", - "sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c" - ], - "version": "==0.9.4" - }, "markupsafe": { "hashes": [ "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", @@ -161,14 +148,6 @@ ], "version": "==2.19" }, - "python-dateutil": { - "hashes": [ - "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb", - "sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e" - ], - "markers": "python_version >= '2.7'", - "version": "==2.8.0" - }, "pyyaml": { "hashes": [ "sha256:0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9", @@ -187,27 +166,12 @@ ], "version": "==5.1.2" }, - "s3transfer": { - "hashes": [ - "sha256:6efc926738a3cd576c2a79725fed9afde92378aa5c6a957e3af010cb019fac9d", - "sha256:b780f2411b824cb541dbcd2c713d0cb61c7d1bcadae204cdddda2b35cef493ba" - ], - "version": "==0.2.1" - }, "six": { "hashes": [ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", 
"sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" ], "version": "==1.12.0" - }, - "urllib3": { - "hashes": [ - "sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398", - "sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86" - ], - "markers": "python_version >= '3.4'", - "version": "==1.25.6" } }, "develop": { @@ -261,9 +225,24 @@ }, "aws-sam-translator": { "hashes": [ - "sha256:3c615bff465fcf6a7990b9f84d002d55c75cd3e52d98e727d24959756ab0f0b1" + "sha256:6563aa3b534e7ad672d580ecd3dfa92021e81b4e5983604c0df7ee0a07b3ed99" ], - "version": "==1.14.0" + "version": "==1.15.0" + }, + "backports.functools-lru-cache": { + "hashes": [ + "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a", + "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd" + ], + "markers": "python_version == '2.7'", + "version": "==1.5" + }, + "backports.ssl-match-hostname": { + "hashes": [ + "sha256:bb82e60f9fbf4c080eabd957c39f0641f0fc247d9a16e31e26d594d8f42b9fd2" + ], + "markers": "python_version < '3.5'", + "version": "==3.7.0.1" }, "binaryornot": { "hashes": [ @@ -274,19 +253,17 @@ }, "boto3": { "hashes": [ - "sha256:60e711f1113be926bcec1cfe62fa336438d021ce834f4a5228beead3b4bc5142", - "sha256:8c9b9b2422c1baa84c0f331ee86ac4d265e1e7d321ce7ba58dbb863585c2191f" + "sha256:4189e1ffed768bd0efd754a0abedebce19495ba2aa6b2f5e20f29ba80f81f9cb", + "sha256:fa4e28166922feeb9b7b56134c1acc817a1bca36284a0035bc08a3dab1853a9f" ], - "index": "pypi", - "version": "==1.9.241" + "version": "==1.9.242" }, "botocore": { "hashes": [ - "sha256:897415ec68b2cbb65a7d32965c456d332bb2eb936e533c9ad6064cd15e67c0c1", - "sha256:e35c2e6b8946be9063d7988b19dea2b6136b80c0e3469b6a076c574d5abca6b3" + "sha256:7af52e0aabaf4ba045e1a5832308e70e1ea4b499b71624857f09aed2ba5e667c", + "sha256:dd62d63bcd3176c92775c52d3e879288f89bf0ac0039df14ea31f25d693acd6d" ], - "index": "pypi", - "version": "==1.12.241" + "version": "==1.12.242" }, "cerberus": { 
"hashes": [ @@ -368,6 +345,22 @@ ], "version": "==0.3.9" }, + "configparser": { + "hashes": [ + "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", + "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" + ], + "markers": "python_version < '3'", + "version": "==4.0.2" + }, + "contextlib2": { + "hashes": [ + "sha256:7197aa736777caac513dbd800944c209a49765bf1979b12b037dce0277077ed3", + "sha256:9d2c67f18c1f9b6db1b46317f7f784aa82789d2ee5dea5d9c0f0f2a764eb862e" + ], + "markers": "python_version < '3'", + "version": "==0.6.0" + }, "cookiecutter": { "hashes": [ "sha256:1316a52e1c1f08db0c9efbf7d876dbc01463a74b155a0d83e722be88beda9a3e", @@ -419,6 +412,16 @@ ], "version": "==0.3" }, + "enum34": { + "hashes": [ + "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850", + "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a", + "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79", + "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1" + ], + "markers": "python_version < '3'", + "version": "==1.1.6" + }, "fasteners": { "hashes": [ "sha256:007e4d2b2d4a10093f67e932e5166722d2eab83b77724156e92ad013c6226574", @@ -433,12 +436,36 @@ ], "version": "==3.7.8" }, + "funcsigs": { + "hashes": [ + "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", + "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" + ], + "markers": "python_version < '3.3'", + "version": "==1.0.2" + }, + "functools32": { + "hashes": [ + "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", + "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" + ], + "markers": "python_version < '3.2'", + "version": "==3.2.3.post2" + }, "future": { "hashes": [ "sha256:67045236dcfd6816dc439556d009594abf643e5eb48992e36beac09c2ca659b8" ], "version": "==0.17.1" }, + "futures": { + "hashes": [ + 
"sha256:49b3f5b064b6e3afc3316421a3f25f66c137ae88f068abbf72830170033c5e16", + "sha256:7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794" + ], + "markers": "python_version == '2.6' or python_version == '2.7'", + "version": "==3.3.0" + }, "git-url-parse": { "hashes": [ "sha256:4655ee22f1d8bf7a1eb1066c1da16529b186966c6d8331f7f55686a76a9f7aef", @@ -462,6 +489,14 @@ "markers": "python_version < '3.8'", "version": "==0.23" }, + "ipaddress": { + "hashes": [ + "sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794", + "sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c" + ], + "markers": "python_version < '3'", + "version": "==1.0.22" + }, "jinja2": { "hashes": [ "sha256:065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013", @@ -580,11 +615,19 @@ ], "version": "==19.2" }, + "pathlib2": { + "hashes": [ + "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", + "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" + ], + "markers": "python_version == '3.4.*' or python_version < '3'", + "version": "==2.3.5" + }, "pathspec": { "hashes": [ - "sha256:54a5eab895d89f342b52ba2bffe70930ef9f8d96e398cccf530d21fa0516a873" + "sha256:e285ccc8b0785beadd4c18e5708b12bb8fcf529a1e61215b3feff1d1e559ea5c" ], - "version": "==0.5.9" + "version": "==0.6.0" }, "pbr": { "hashes": [ @@ -725,6 +768,18 @@ ], "version": "==2.22.0" }, + "ruamel.ordereddict": { + "hashes": [ + "sha256:281051d26eb2b18ef3d920e1e260716a52bd058a6b1a2f324102fc6a15cb8d4a", + "sha256:36fe0af3a02a0e1199447d050e6c3a1f5bd4c7d68e4c260f6a7a058fb4da71cb", + "sha256:4375a70d5d217069a8349bf5fbc27aa4cf1aedfbf03ce94df113b75d22d1a1e2", + "sha256:4cd0ec38dac57a4054dda14b0a5eea1a877dcc73106131ef08513fb89ba95a22", + "sha256:4f641c4de9082866b9e88497ad8050dca38c5ddbb8cb7ae9316da9db257092b2", + "sha256:7324310945c6b47218255b5d75ccbc74d435221c44652ec4406b1a871ddc3bc3" + ], + "markers": "platform_python_implementation == 'CPython' and 
python_version <= '2.7'", + "version": "==0.4.14" + }, "ruamel.yaml": { "hashes": [ "sha256:0db639b1b2742dae666c6fc009b8d1931ef15c9276ef31c0673cc6dcf766cf40", @@ -763,6 +818,23 @@ ], "version": "==0.2.1" }, + "scandir": { + "hashes": [ + "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e", + "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022", + "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f", + "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f", + "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae", + "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173", + "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4", + "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32", + "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188", + "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d", + "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac" + ], + "markers": "python_version < '3.5'", + "version": "==1.10.0" + }, "sh": { "hashes": [ "sha256:ae3258c5249493cebe73cb4e18253a41ed69262484bad36fdb3efcb8ad8870bb", @@ -804,12 +876,20 @@ ], "version": "==0.1.2" }, + "typing": { + "hashes": [ + "sha256:91dfe6f3f706ee8cc32d38edbbf304e9b7583fb37108fef38229617f8b3eba23", + "sha256:c8cabb5ab8945cd2f54917be357d134db9cc1eb039e59d1606dc1e60cb1d9d36", + "sha256:f38d83c5a7a7086543a0f649564d661859c5146a85775ab90c0d2f93ffaa9714" + ], + "markers": "python_version < '3.5'", + "version": "==3.7.4.1" + }, "urllib3": { "hashes": [ "sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398", "sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86" ], - "markers": "python_version >= '3.4'", "version": "==1.25.6" }, "wcwidth": { diff --git a/bin/install-ansible b/bin/install-ansible index f94539d..4654763 100755 --- a/bin/install-ansible 
+++ b/bin/install-ansible @@ -15,8 +15,7 @@ fi if [[ $1 == "--dev" ]]; then ./bin/pacapt install --noconfirm \ - python-dev python-pip \ - python3-dev python3-pip + python-dev python-pip pip install pipenv pipenv sync --dev diff --git a/bitbucket-pipelines.yml b/bitbucket-pipelines.yml index 83f6b71..ec993c6 100644 --- a/bitbucket-pipelines.yml +++ b/bitbucket-pipelines.yml @@ -1,6 +1,6 @@ --- -image: ubuntu:disco +image: debian:buster options: size: 2x diff --git a/pipeline_generator/templates/bitbucket-pipelines.yml.j2 b/pipeline_generator/templates/bitbucket-pipelines.yml.j2 index 4432857..6b89e62 100644 --- a/pipeline_generator/templates/bitbucket-pipelines.yml.j2 +++ b/pipeline_generator/templates/bitbucket-pipelines.yml.j2 @@ -1,6 +1,6 @@ --- -image: ubuntu:disco +image: debian:buster options: size: 2x From 60b823cd02dac1c4fb791225a84c74f80145281b Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Fri, 4 Oct 2019 12:10:07 +1000 Subject: [PATCH 21/93] DCD-686: Try with python3. --- Pipfile | 2 +- Pipfile.lock | 142 +------------------------------------------- bin/install-ansible | 4 +- 3 files changed, 5 insertions(+), 143 deletions(-) diff --git a/Pipfile b/Pipfile index e8b700a..766c833 100644 --- a/Pipfile +++ b/Pipfile @@ -14,4 +14,4 @@ taskcat = "*" Jinja2 = "*" [requires] -python_version = "2.7" +python_version = "3.7" diff --git a/Pipfile.lock b/Pipfile.lock index 0508d9e..6a3e5b4 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,11 +1,11 @@ { "_meta": { "hash": { - "sha256": "32443872340b7d7f286060fb7ce8c930db8eb565aab1702fca75daaec230177a" + "sha256": "dae68e0cb0d94bd8016536c7ed22b2e4cf8292aafd988c421aa31a851763a83e" }, "pipfile-spec": 6, "requires": { - "python_version": "2.7" + "python_version": "3.7" }, "sources": [ { @@ -84,24 +84,6 @@ ], "version": "==2.7" }, - "enum34": { - "hashes": [ - "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850", - "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a", - 
"sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79", - "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1" - ], - "markers": "python_version < '3'", - "version": "==1.1.6" - }, - "ipaddress": { - "hashes": [ - "sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794", - "sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c" - ], - "markers": "python_version < '3'", - "version": "==1.0.22" - }, "jinja2": { "hashes": [ "sha256:065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013", @@ -229,21 +211,6 @@ ], "version": "==1.15.0" }, - "backports.functools-lru-cache": { - "hashes": [ - "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a", - "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd" - ], - "markers": "python_version == '2.7'", - "version": "==1.5" - }, - "backports.ssl-match-hostname": { - "hashes": [ - "sha256:bb82e60f9fbf4c080eabd957c39f0641f0fc247d9a16e31e26d594d8f42b9fd2" - ], - "markers": "python_version < '3.5'", - "version": "==3.7.0.1" - }, "binaryornot": { "hashes": [ "sha256:359501dfc9d40632edc9fac890e19542db1a287bbcfa58175b66658392018061", @@ -345,22 +312,6 @@ ], "version": "==0.3.9" }, - "configparser": { - "hashes": [ - "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", - "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" - ], - "markers": "python_version < '3'", - "version": "==4.0.2" - }, - "contextlib2": { - "hashes": [ - "sha256:7197aa736777caac513dbd800944c209a49765bf1979b12b037dce0277077ed3", - "sha256:9d2c67f18c1f9b6db1b46317f7f784aa82789d2ee5dea5d9c0f0f2a764eb862e" - ], - "markers": "python_version < '3'", - "version": "==0.6.0" - }, "cookiecutter": { "hashes": [ "sha256:1316a52e1c1f08db0c9efbf7d876dbc01463a74b155a0d83e722be88beda9a3e", @@ -412,16 +363,6 @@ ], "version": "==0.3" }, - "enum34": { - "hashes": [ - 
"sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850", - "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a", - "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79", - "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1" - ], - "markers": "python_version < '3'", - "version": "==1.1.6" - }, "fasteners": { "hashes": [ "sha256:007e4d2b2d4a10093f67e932e5166722d2eab83b77724156e92ad013c6226574", @@ -436,36 +377,12 @@ ], "version": "==3.7.8" }, - "funcsigs": { - "hashes": [ - "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", - "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" - ], - "markers": "python_version < '3.3'", - "version": "==1.0.2" - }, - "functools32": { - "hashes": [ - "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", - "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" - ], - "markers": "python_version < '3.2'", - "version": "==3.2.3.post2" - }, "future": { "hashes": [ "sha256:67045236dcfd6816dc439556d009594abf643e5eb48992e36beac09c2ca659b8" ], "version": "==0.17.1" }, - "futures": { - "hashes": [ - "sha256:49b3f5b064b6e3afc3316421a3f25f66c137ae88f068abbf72830170033c5e16", - "sha256:7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794" - ], - "markers": "python_version == '2.6' or python_version == '2.7'", - "version": "==3.3.0" - }, "git-url-parse": { "hashes": [ "sha256:4655ee22f1d8bf7a1eb1066c1da16529b186966c6d8331f7f55686a76a9f7aef", @@ -489,14 +406,6 @@ "markers": "python_version < '3.8'", "version": "==0.23" }, - "ipaddress": { - "hashes": [ - "sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794", - "sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c" - ], - "markers": "python_version < '3'", - "version": "==1.0.22" - }, "jinja2": { "hashes": [ "sha256:065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013", @@ 
-615,14 +524,6 @@ ], "version": "==19.2" }, - "pathlib2": { - "hashes": [ - "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", - "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" - ], - "markers": "python_version == '3.4.*' or python_version < '3'", - "version": "==2.3.5" - }, "pathspec": { "hashes": [ "sha256:e285ccc8b0785beadd4c18e5708b12bb8fcf529a1e61215b3feff1d1e559ea5c" @@ -732,7 +633,6 @@ "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb", "sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e" ], - "markers": "python_version >= '2.7'", "version": "==2.8.0" }, "python-gilt": { @@ -768,18 +668,6 @@ ], "version": "==2.22.0" }, - "ruamel.ordereddict": { - "hashes": [ - "sha256:281051d26eb2b18ef3d920e1e260716a52bd058a6b1a2f324102fc6a15cb8d4a", - "sha256:36fe0af3a02a0e1199447d050e6c3a1f5bd4c7d68e4c260f6a7a058fb4da71cb", - "sha256:4375a70d5d217069a8349bf5fbc27aa4cf1aedfbf03ce94df113b75d22d1a1e2", - "sha256:4cd0ec38dac57a4054dda14b0a5eea1a877dcc73106131ef08513fb89ba95a22", - "sha256:4f641c4de9082866b9e88497ad8050dca38c5ddbb8cb7ae9316da9db257092b2", - "sha256:7324310945c6b47218255b5d75ccbc74d435221c44652ec4406b1a871ddc3bc3" - ], - "markers": "platform_python_implementation == 'CPython' and python_version <= '2.7'", - "version": "==0.4.14" - }, "ruamel.yaml": { "hashes": [ "sha256:0db639b1b2742dae666c6fc009b8d1931ef15c9276ef31c0673cc6dcf766cf40", @@ -818,23 +706,6 @@ ], "version": "==0.2.1" }, - "scandir": { - "hashes": [ - "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e", - "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022", - "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f", - "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f", - "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae", - "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173", - 
"sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4", - "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32", - "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188", - "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d", - "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac" - ], - "markers": "python_version < '3.5'", - "version": "==1.10.0" - }, "sh": { "hashes": [ "sha256:ae3258c5249493cebe73cb4e18253a41ed69262484bad36fdb3efcb8ad8870bb", @@ -876,15 +747,6 @@ ], "version": "==0.1.2" }, - "typing": { - "hashes": [ - "sha256:91dfe6f3f706ee8cc32d38edbbf304e9b7583fb37108fef38229617f8b3eba23", - "sha256:c8cabb5ab8945cd2f54917be357d134db9cc1eb039e59d1606dc1e60cb1d9d36", - "sha256:f38d83c5a7a7086543a0f649564d661859c5146a85775ab90c0d2f93ffaa9714" - ], - "markers": "python_version < '3.5'", - "version": "==3.7.4.1" - }, "urllib3": { "hashes": [ "sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398", diff --git a/bin/install-ansible b/bin/install-ansible index 4654763..953663f 100755 --- a/bin/install-ansible +++ b/bin/install-ansible @@ -15,8 +15,8 @@ fi if [[ $1 == "--dev" ]]; then ./bin/pacapt install --noconfirm \ - python-dev python-pip + python3-dev python3-pip - pip install pipenv + pip3 install pipenv pipenv sync --dev fi From 0209ad22b7aaf0941c195845510c48f0343c2748 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Fri, 4 Oct 2019 12:24:59 +1000 Subject: [PATCH 22/93] DCD-686: Another combination of dev dependencies to work around compatability issues. 
--- Pipfile | 6 ++-- Pipfile.lock | 82 +++++++++++++++++++++++++++++++++++++++++++--------- 2 files changed, 71 insertions(+), 17 deletions(-) diff --git a/Pipfile b/Pipfile index 766c833..c0d22ba 100644 --- a/Pipfile +++ b/Pipfile @@ -4,11 +4,11 @@ verify_ssl = true name = "pypi" [packages] -ansible = "==2.8.2" +ansible = "==2.7.11" [dev-packages] -molecule = "==2.20.1" -docker = "==4.0.1" +molecule = "==2.20.2" +docker = "==4.1.0" six = "*" taskcat = "*" Jinja2 = "*" diff --git a/Pipfile.lock b/Pipfile.lock index 6a3e5b4..d5ce753 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "dae68e0cb0d94bd8016536c7ed22b2e4cf8292aafd988c421aa31a851763a83e" + "sha256": "3e6fecddc35743d370fbdbd68b57b6edc588fe026f58e67e6f6343c7dafc2ee6" }, "pipfile-spec": 6, "requires": { @@ -18,10 +18,10 @@ "default": { "ansible": { "hashes": [ - "sha256:1e5ba829ca0602c55b33da399b06f99b135a34014b661d1c36d8892a1e2d3730" + "sha256:e7e6de461b7d07cb4d8b2dd2a32b231af7c56e6bf39b851024671aaa52fd377e" ], "index": "pypi", - "version": "==2.8.2" + "version": "==2.7.11" }, "asn1crypto": { "hashes": [ @@ -30,6 +30,27 @@ ], "version": "==1.0.0" }, + "bcrypt": { + "hashes": [ + "sha256:0258f143f3de96b7c14f762c770f5fc56ccd72f8a1857a451c1cd9a655d9ac89", + "sha256:0b0069c752ec14172c5f78208f1863d7ad6755a6fae6fe76ec2c80d13be41e42", + "sha256:19a4b72a6ae5bb467fea018b825f0a7d917789bcfe893e53f15c92805d187294", + "sha256:5432dd7b34107ae8ed6c10a71b4397f1c853bd39a4d6ffa7e35f40584cffd161", + "sha256:69361315039878c0680be456640f8705d76cb4a3a3fe1e057e0f261b74be4b31", + "sha256:6fe49a60b25b584e2f4ef175b29d3a83ba63b3a4df1b4c0605b826668d1b6be5", + "sha256:74a015102e877d0ccd02cdeaa18b32aa7273746914a6c5d0456dd442cb65b99c", + "sha256:763669a367869786bb4c8fcf731f4175775a5b43f070f50f46f0b59da45375d0", + "sha256:8b10acde4e1919d6015e1df86d4c217d3b5b01bb7744c36113ea43d529e1c3de", + "sha256:9fe92406c857409b70a38729dbdf6578caf9228de0aef5bc44f859ffe971a39e", + 
"sha256:a190f2a5dbbdbff4b74e3103cef44344bc30e61255beb27310e2aec407766052", + "sha256:a595c12c618119255c90deb4b046e1ca3bcfad64667c43d1166f2b04bc72db09", + "sha256:c9457fa5c121e94a58d6505cadca8bed1c64444b83b3204928a866ca2e599105", + "sha256:cb93f6b2ab0f6853550b74e051d297c27a638719753eb9ff66d1e4072be67133", + "sha256:d7bdc26475679dd073ba0ed2766445bb5b20ca4793ca0db32b399dccc6bc84b7", + "sha256:ff032765bb8716d9387fd5376d987a937254b0619eff0972779515b5c98820bc" + ], + "version": "==3.1.7" + }, "cffi": { "hashes": [ "sha256:041c81822e9f84b1d9c401182e174996f0bae9991f33725d059b771744290774", @@ -124,12 +145,43 @@ ], "version": "==1.1.1" }, + "paramiko": { + "hashes": [ + "sha256:99f0179bdc176281d21961a003ffdb2ec369daac1a1007241f53374e376576cf", + "sha256:f4b2edfa0d226b70bd4ca31ea7e389325990283da23465d572ed1f70a7583041" + ], + "version": "==2.6.0" + }, "pycparser": { "hashes": [ "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3" ], "version": "==2.19" }, + "pynacl": { + "hashes": [ + "sha256:05c26f93964373fc0abe332676cb6735f0ecad27711035b9472751faa8521255", + "sha256:0c6100edd16fefd1557da078c7a31e7b7d7a52ce39fdca2bec29d4f7b6e7600c", + "sha256:0d0a8171a68edf51add1e73d2159c4bc19fc0718e79dec51166e940856c2f28e", + "sha256:1c780712b206317a746ace34c209b8c29dbfd841dfbc02aa27f2084dd3db77ae", + "sha256:2424c8b9f41aa65bbdbd7a64e73a7450ebb4aa9ddedc6a081e7afcc4c97f7621", + "sha256:2d23c04e8d709444220557ae48ed01f3f1086439f12dbf11976e849a4926db56", + "sha256:30f36a9c70450c7878053fa1344aca0145fd47d845270b43a7ee9192a051bf39", + "sha256:37aa336a317209f1bb099ad177fef0da45be36a2aa664507c5d72015f956c310", + "sha256:4943decfc5b905748f0756fdd99d4f9498d7064815c4cf3643820c9028b711d1", + "sha256:57ef38a65056e7800859e5ba9e6091053cd06e1038983016effaffe0efcd594a", + "sha256:5bd61e9b44c543016ce1f6aef48606280e45f892a928ca7068fba30021e9b786", + "sha256:6482d3017a0c0327a49dddc8bd1074cc730d45db2ccb09c3bac1f8f32d1eb61b", + 
"sha256:7d3ce02c0784b7cbcc771a2da6ea51f87e8716004512493a2b69016326301c3b", + "sha256:a14e499c0f5955dcc3991f785f3f8e2130ed504fa3a7f44009ff458ad6bdd17f", + "sha256:a39f54ccbcd2757d1d63b0ec00a00980c0b382c62865b61a505163943624ab20", + "sha256:aabb0c5232910a20eec8563503c153a8e78bbf5459490c49ab31f6adf3f3a415", + "sha256:bd4ecb473a96ad0f90c20acba4f0bf0df91a4e03a1f4dd6a4bdc9ca75aa3a715", + "sha256:e2da3c13307eac601f3de04887624939aca8ee3c9488a0bb0eca4fb9401fc6b1", + "sha256:f67814c38162f4deb31f68d590771a29d5ae3b1bd64b75cf232308e5c74777e0" + ], + "version": "==1.3.0" + }, "pyyaml": { "hashes": [ "sha256:0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9", @@ -159,10 +211,10 @@ "develop": { "ansible": { "hashes": [ - "sha256:1e5ba829ca0602c55b33da399b06f99b135a34014b661d1c36d8892a1e2d3730" + "sha256:e7e6de461b7d07cb4d8b2dd2a32b231af7c56e6bf39b851024671aaa52fd377e" ], "index": "pypi", - "version": "==2.8.2" + "version": "==2.7.11" }, "ansible-lint": { "hashes": [ @@ -342,11 +394,11 @@ }, "docker": { "hashes": [ - "sha256:3db499d4d25847fed86acf8e100c989f7bc0f75a6fff6c52855726ada1d124f6", - "sha256:f61c37d721b489b7d55ef631b241be2d6a5884c3ffe63dc8f7dd9a3c3cd60489" + "sha256:6e06c5e70ba4fad73e35f00c55a895a448398f3ada7faae072e2bb01348bafc1", + "sha256:8f93775b8bdae3a2df6bc9a5312cce564cade58d6555f2c2570165a1270cd8a7" ], "index": "pypi", - "version": "==4.0.1" + "version": "==4.1.0" }, "docutils": { "hashes": [ @@ -497,11 +549,11 @@ }, "molecule": { "hashes": [ - "sha256:0e9ef6845cdf2a01f6c386445e4e54add3f515a033ee16b7b658e6122c8f0d76", - "sha256:621797c54299775f284bbb010d5bb9be485500eecaaa14a476cbc0df285d0da7" + "sha256:5fa56e52602364716dd5aa55e1dd70400f2094b8cc3c458869e5382e84149065", + "sha256:9dc29b9ef172b26532752784687faca2e868c84e2d90f0b4f018d81d76a8b30a" ], "index": "pypi", - "version": "==2.20.1" + "version": "==2.20.2" }, "monotonic": { "hashes": [ @@ -633,6 +685,7 @@ "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb", 
"sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e" ], + "markers": "python_version >= '2.7'", "version": "==2.8.0" }, "python-gilt": { @@ -735,10 +788,10 @@ }, "testinfra": { "hashes": [ - "sha256:8dbbf25039674d419598f576c5652947cebdf7cbbea8f23acacc80271009c6cb", - "sha256:d13dda899d5a051465f041a821363e2ebdd079391fbeae04089a2df7d35e3d54" + "sha256:16201d64659ec0c2d25f65d6ce1f5367668b7b4eb102450efd4f8983a399d7d0", + "sha256:5cebf61fee13c2e83b5e177431e751e243fc779293377c5e0c3b43910bb7e870" ], - "version": "==1.19.0" + "version": "==3.2.0" }, "tree-format": { "hashes": [ @@ -752,6 +805,7 @@ "sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398", "sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86" ], + "markers": "python_version >= '3.4'", "version": "==1.25.6" }, "wcwidth": { From 606ac960d90ec57ca39a441a910c4b921125720f Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Fri, 4 Oct 2019 13:35:17 +1000 Subject: [PATCH 23/93] DCD-686: Ansible 2.8 added a timeout to the yum module which broke everything. 
--- roles/aws_common/tasks/amazon.yml | 1 + roles/linux_common/tasks/amazon.yml | 1 + roles/nfs_server/tasks/amazon.yml | 1 + roles/nfs_server/tasks/ubuntu.yml | 2 +- roles/product_common/tasks/amazon.yml | 2 ++ 5 files changed, 6 insertions(+), 1 deletion(-) diff --git a/roles/aws_common/tasks/amazon.yml b/roles/aws_common/tasks/amazon.yml index a6592bf..f19485a 100644 --- a/roles/aws_common/tasks/amazon.yml +++ b/roles/aws_common/tasks/amazon.yml @@ -6,6 +6,7 @@ - ec2-utils - amazon-ssm-agent - amazon-efs-utils + lock_timeout: 30 - name: Install CloudWatch Agent yum: diff --git a/roles/linux_common/tasks/amazon.yml b/roles/linux_common/tasks/amazon.yml index 3be04db..ad70b98 100644 --- a/roles/linux_common/tasks/amazon.yml +++ b/roles/linux_common/tasks/amazon.yml @@ -6,3 +6,4 @@ - shadow-utils - libxml2 - git-{{ git_version }} + lock_timeout: 30 diff --git a/roles/nfs_server/tasks/amazon.yml b/roles/nfs_server/tasks/amazon.yml index ad2adee..67b71d0 100644 --- a/roles/nfs_server/tasks/amazon.yml +++ b/roles/nfs_server/tasks/amazon.yml @@ -4,3 +4,4 @@ yum: name: - nfs-utils + lock_timeout: 30 diff --git a/roles/nfs_server/tasks/ubuntu.yml b/roles/nfs_server/tasks/ubuntu.yml index 5bb5dcb..becb1d8 100644 --- a/roles/nfs_server/tasks/ubuntu.yml +++ b/roles/nfs_server/tasks/ubuntu.yml @@ -1,7 +1,7 @@ --- - name: Install Ubuntu-specific NFS packages - yum: + apt: name: - nfs-kernel-server - libnfs-utils diff --git a/roles/product_common/tasks/amazon.yml b/roles/product_common/tasks/amazon.yml index c02f864..0175922 100644 --- a/roles/product_common/tasks/amazon.yml +++ b/roles/product_common/tasks/amazon.yml @@ -4,9 +4,11 @@ yum: name: - java-{{ java_version }}-openjdk-devel + lock_timeout: 30 when: atl_use_system_jdk - name: Install other base packages on Amazon Linux yum: name: - dejavu-fonts-common # Required by the installer + lock_timeout: 30 From bbff7f94bb34bbdc4eddbbecc59297413d802ed8 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Fri, 4 Oct 2019 13:49:43 
+1000 Subject: [PATCH 24/93] DCD-686: Revert lockfiles as we should move to fine control over Ansible versions (again). --- roles/aws_common/tasks/amazon.yml | 1 - roles/linux_common/tasks/amazon.yml | 1 - roles/nfs_server/tasks/amazon.yml | 1 - roles/product_common/tasks/amazon.yml | 2 -- 4 files changed, 5 deletions(-) diff --git a/roles/aws_common/tasks/amazon.yml b/roles/aws_common/tasks/amazon.yml index f19485a..a6592bf 100644 --- a/roles/aws_common/tasks/amazon.yml +++ b/roles/aws_common/tasks/amazon.yml @@ -6,7 +6,6 @@ - ec2-utils - amazon-ssm-agent - amazon-efs-utils - lock_timeout: 30 - name: Install CloudWatch Agent yum: diff --git a/roles/linux_common/tasks/amazon.yml b/roles/linux_common/tasks/amazon.yml index ad70b98..3be04db 100644 --- a/roles/linux_common/tasks/amazon.yml +++ b/roles/linux_common/tasks/amazon.yml @@ -6,4 +6,3 @@ - shadow-utils - libxml2 - git-{{ git_version }} - lock_timeout: 30 diff --git a/roles/nfs_server/tasks/amazon.yml b/roles/nfs_server/tasks/amazon.yml index 67b71d0..ad2adee 100644 --- a/roles/nfs_server/tasks/amazon.yml +++ b/roles/nfs_server/tasks/amazon.yml @@ -4,4 +4,3 @@ yum: name: - nfs-utils - lock_timeout: 30 diff --git a/roles/product_common/tasks/amazon.yml b/roles/product_common/tasks/amazon.yml index 0175922..c02f864 100644 --- a/roles/product_common/tasks/amazon.yml +++ b/roles/product_common/tasks/amazon.yml @@ -4,11 +4,9 @@ yum: name: - java-{{ java_version }}-openjdk-devel - lock_timeout: 30 when: atl_use_system_jdk - name: Install other base packages on Amazon Linux yum: name: - dejavu-fonts-common # Required by the installer - lock_timeout: 30 From 87ac31ea3a101b50b4dddad610ef12f85f1a03d7 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Fri, 4 Oct 2019 13:55:37 +1000 Subject: [PATCH 25/93] DCD-686: Move back to pipenv-based Ansible installation to work around bugs (again). 
--- Pipfile | 4 +- Pipfile.lock | 106 ++++++++++++++++++++------------------- bin/ansible-with-atl-env | 11 ++-- bin/install-ansible | 18 ++----- 4 files changed, 69 insertions(+), 70 deletions(-) diff --git a/Pipfile b/Pipfile index c0d22ba..e78f929 100644 --- a/Pipfile +++ b/Pipfile @@ -4,7 +4,9 @@ verify_ssl = true name = "pypi" [packages] -ansible = "==2.7.11" +ansible = "==2.8.5" +boto3 = "==1.9.242" +botocore = "==1.12.242" [dev-packages] molecule = "==2.20.2" diff --git a/Pipfile.lock b/Pipfile.lock index d5ce753..725f9f6 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "3e6fecddc35743d370fbdbd68b57b6edc588fe026f58e67e6f6343c7dafc2ee6" + "sha256": "4cec168800858d0bce3beaae422011cb6458d548e9a8fc1807f39bd7c8eb24e6" }, "pipfile-spec": 6, "requires": { @@ -18,10 +18,10 @@ "default": { "ansible": { "hashes": [ - "sha256:e7e6de461b7d07cb4d8b2dd2a32b231af7c56e6bf39b851024671aaa52fd377e" + "sha256:8e9403e755ce8ef27b6066cdd7a4c567aa80ebe2fd90d0ff8efa0a725d246986" ], "index": "pypi", - "version": "==2.7.11" + "version": "==2.8.5" }, "asn1crypto": { "hashes": [ @@ -30,26 +30,21 @@ ], "version": "==1.0.0" }, - "bcrypt": { + "boto3": { "hashes": [ - "sha256:0258f143f3de96b7c14f762c770f5fc56ccd72f8a1857a451c1cd9a655d9ac89", - "sha256:0b0069c752ec14172c5f78208f1863d7ad6755a6fae6fe76ec2c80d13be41e42", - "sha256:19a4b72a6ae5bb467fea018b825f0a7d917789bcfe893e53f15c92805d187294", - "sha256:5432dd7b34107ae8ed6c10a71b4397f1c853bd39a4d6ffa7e35f40584cffd161", - "sha256:69361315039878c0680be456640f8705d76cb4a3a3fe1e057e0f261b74be4b31", - "sha256:6fe49a60b25b584e2f4ef175b29d3a83ba63b3a4df1b4c0605b826668d1b6be5", - "sha256:74a015102e877d0ccd02cdeaa18b32aa7273746914a6c5d0456dd442cb65b99c", - "sha256:763669a367869786bb4c8fcf731f4175775a5b43f070f50f46f0b59da45375d0", - "sha256:8b10acde4e1919d6015e1df86d4c217d3b5b01bb7744c36113ea43d529e1c3de", - "sha256:9fe92406c857409b70a38729dbdf6578caf9228de0aef5bc44f859ffe971a39e", - 
"sha256:a190f2a5dbbdbff4b74e3103cef44344bc30e61255beb27310e2aec407766052", - "sha256:a595c12c618119255c90deb4b046e1ca3bcfad64667c43d1166f2b04bc72db09", - "sha256:c9457fa5c121e94a58d6505cadca8bed1c64444b83b3204928a866ca2e599105", - "sha256:cb93f6b2ab0f6853550b74e051d297c27a638719753eb9ff66d1e4072be67133", - "sha256:d7bdc26475679dd073ba0ed2766445bb5b20ca4793ca0db32b399dccc6bc84b7", - "sha256:ff032765bb8716d9387fd5376d987a937254b0619eff0972779515b5c98820bc" + "sha256:4189e1ffed768bd0efd754a0abedebce19495ba2aa6b2f5e20f29ba80f81f9cb", + "sha256:fa4e28166922feeb9b7b56134c1acc817a1bca36284a0035bc08a3dab1853a9f" ], - "version": "==3.1.7" + "index": "pypi", + "version": "==1.9.242" + }, + "botocore": { + "hashes": [ + "sha256:7af52e0aabaf4ba045e1a5832308e70e1ea4b499b71624857f09aed2ba5e667c", + "sha256:dd62d63bcd3176c92775c52d3e879288f89bf0ac0039df14ea31f25d693acd6d" + ], + "index": "pypi", + "version": "==1.12.242" }, "cffi": { "hashes": [ @@ -105,6 +100,14 @@ ], "version": "==2.7" }, + "docutils": { + "hashes": [ + "sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0", + "sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827", + "sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99" + ], + "version": "==0.15.2" + }, "jinja2": { "hashes": [ "sha256:065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013", @@ -112,6 +115,13 @@ ], "version": "==2.10.1" }, + "jmespath": { + "hashes": [ + "sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6", + "sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c" + ], + "version": "==0.9.4" + }, "markupsafe": { "hashes": [ "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", @@ -145,42 +155,19 @@ ], "version": "==1.1.1" }, - "paramiko": { - "hashes": [ - "sha256:99f0179bdc176281d21961a003ffdb2ec369daac1a1007241f53374e376576cf", - "sha256:f4b2edfa0d226b70bd4ca31ea7e389325990283da23465d572ed1f70a7583041" - ], - 
"version": "==2.6.0" - }, "pycparser": { "hashes": [ "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3" ], "version": "==2.19" }, - "pynacl": { + "python-dateutil": { "hashes": [ - "sha256:05c26f93964373fc0abe332676cb6735f0ecad27711035b9472751faa8521255", - "sha256:0c6100edd16fefd1557da078c7a31e7b7d7a52ce39fdca2bec29d4f7b6e7600c", - "sha256:0d0a8171a68edf51add1e73d2159c4bc19fc0718e79dec51166e940856c2f28e", - "sha256:1c780712b206317a746ace34c209b8c29dbfd841dfbc02aa27f2084dd3db77ae", - "sha256:2424c8b9f41aa65bbdbd7a64e73a7450ebb4aa9ddedc6a081e7afcc4c97f7621", - "sha256:2d23c04e8d709444220557ae48ed01f3f1086439f12dbf11976e849a4926db56", - "sha256:30f36a9c70450c7878053fa1344aca0145fd47d845270b43a7ee9192a051bf39", - "sha256:37aa336a317209f1bb099ad177fef0da45be36a2aa664507c5d72015f956c310", - "sha256:4943decfc5b905748f0756fdd99d4f9498d7064815c4cf3643820c9028b711d1", - "sha256:57ef38a65056e7800859e5ba9e6091053cd06e1038983016effaffe0efcd594a", - "sha256:5bd61e9b44c543016ce1f6aef48606280e45f892a928ca7068fba30021e9b786", - "sha256:6482d3017a0c0327a49dddc8bd1074cc730d45db2ccb09c3bac1f8f32d1eb61b", - "sha256:7d3ce02c0784b7cbcc771a2da6ea51f87e8716004512493a2b69016326301c3b", - "sha256:a14e499c0f5955dcc3991f785f3f8e2130ed504fa3a7f44009ff458ad6bdd17f", - "sha256:a39f54ccbcd2757d1d63b0ec00a00980c0b382c62865b61a505163943624ab20", - "sha256:aabb0c5232910a20eec8563503c153a8e78bbf5459490c49ab31f6adf3f3a415", - "sha256:bd4ecb473a96ad0f90c20acba4f0bf0df91a4e03a1f4dd6a4bdc9ca75aa3a715", - "sha256:e2da3c13307eac601f3de04887624939aca8ee3c9488a0bb0eca4fb9401fc6b1", - "sha256:f67814c38162f4deb31f68d590771a29d5ae3b1bd64b75cf232308e5c74777e0" + "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb", + "sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e" ], - "version": "==1.3.0" + "markers": "python_version >= '2.7'", + "version": "==2.8.0" }, "pyyaml": { "hashes": [ @@ -200,21 +187,36 @@ ], "version": "==5.1.2" }, + "s3transfer": { 
+ "hashes": [ + "sha256:6efc926738a3cd576c2a79725fed9afde92378aa5c6a957e3af010cb019fac9d", + "sha256:b780f2411b824cb541dbcd2c713d0cb61c7d1bcadae204cdddda2b35cef493ba" + ], + "version": "==0.2.1" + }, "six": { "hashes": [ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" ], "version": "==1.12.0" + }, + "urllib3": { + "hashes": [ + "sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398", + "sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86" + ], + "markers": "python_version >= '3.4'", + "version": "==1.25.6" } }, "develop": { "ansible": { "hashes": [ - "sha256:e7e6de461b7d07cb4d8b2dd2a32b231af7c56e6bf39b851024671aaa52fd377e" + "sha256:8e9403e755ce8ef27b6066cdd7a4c567aa80ebe2fd90d0ff8efa0a725d246986" ], "index": "pypi", - "version": "==2.7.11" + "version": "==2.8.5" }, "ansible-lint": { "hashes": [ @@ -275,6 +277,7 @@ "sha256:4189e1ffed768bd0efd754a0abedebce19495ba2aa6b2f5e20f29ba80f81f9cb", "sha256:fa4e28166922feeb9b7b56134c1acc817a1bca36284a0035bc08a3dab1853a9f" ], + "index": "pypi", "version": "==1.9.242" }, "botocore": { @@ -282,6 +285,7 @@ "sha256:7af52e0aabaf4ba045e1a5832308e70e1ea4b499b71624857f09aed2ba5e667c", "sha256:dd62d63bcd3176c92775c52d3e879288f89bf0ac0039df14ea31f25d693acd6d" ], + "index": "pypi", "version": "==1.12.242" }, "cerberus": { diff --git a/bin/ansible-with-atl-env b/bin/ansible-with-atl-env index 580b4d4..29d5fee 100755 --- a/bin/ansible-with-atl-env +++ b/bin/ansible-with-atl-env @@ -14,8 +14,9 @@ source $ENV_FILE set +a # Use Ansible from virtualenv if provided -ansible-playbook -v \ - $ATL_DEPLOYMENT_REPOSITORY_CUSTOM_PARAMS \ - -i $INV \ - $PLAYBOOK \ - 2>&1 | tee --append $LOG_FILE +pipenv run \ + ansible-playbook -v \ + $ATL_DEPLOYMENT_REPOSITORY_CUSTOM_PARAMS \ + -i $INV \ + $PLAYBOOK \ + 2>&1 | tee --append $LOG_FILE diff --git a/bin/install-ansible b/bin/install-ansible index 953663f..4f3f81c 
100755 --- a/bin/install-ansible +++ b/bin/install-ansible @@ -2,21 +2,13 @@ set -e -# Amazon Linux 2 packages Ansible separately, so enable the repo -. /etc/os-release -if [[ $ID == 'amzn' ]]; then - amazon-linux-extras enable ansible2 -fi - ./bin/pacapt install --noconfirm \ - ansible \ - python-boto3 \ - python-botocore + python3-dev \ + python3-pip + +pip3 install pipenv +pipenv sync if [[ $1 == "--dev" ]]; then - ./bin/pacapt install --noconfirm \ - python3-dev python3-pip - - pip3 install pipenv pipenv sync --dev fi From d67f579f2f72253c7fe1862c40e108b6e62868ca Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Fri, 4 Oct 2019 15:42:44 +1000 Subject: [PATCH 26/93] DCD-686: Add download of the backups. --- roles/load_backup_manifest/tasks/main.yml | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/roles/load_backup_manifest/tasks/main.yml b/roles/load_backup_manifest/tasks/main.yml index 8a62e9d..45d7e1f 100644 --- a/roles/load_backup_manifest/tasks/main.yml +++ b/roles/load_backup_manifest/tasks/main.yml @@ -44,4 +44,26 @@ file: "{{ atl_backup_manifest_dest }}" name: atl_backup_manifest + - name: Define the DB and home dump destinations + set_fact: + atl_backup_db_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.db_dump | basename }}" + atl_backup_home_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.shared_home_dump | basename }}" + + # FIXME: Here we fetch the backups. However we may wish to stream + # these directly from S3 to the target DB/FS to avoid requiring + # disk-space for the intermediate files. 
+ - name: Fetch DB backup from S3 + aws_s3: + mode: get + bucket: "{{ atl_backup_manifest.db_dump | urlsplit('hostname') }}" + object: "{{ atl_backup_manifest.db_dump | urlsplit('path') }}" + dest: "{{ atl_backup_db_dest }}" + + - name: Fetch Home backup from S3 + aws_s3: + mode: get + bucket: "{{ atl_backup_manifest.shared_home_dump | urlsplit('hostname') }}" + object: "{{ atl_backup_manifest.shared_home_dump | urlsplit('path') }}" + dest: "{{ atl_backup_home_dest }}" + when: atl_backup_manifest_url is defined and atl_backup_manifest_url != '' From fd466f1230e9890be2d36d9612fb4464a4de4172 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Tue, 8 Oct 2019 10:19:39 +1100 Subject: [PATCH 27/93] DCD-686: Another role rename to match functionality. --- roles/{load_backup_manifest => fetch_backups}/tasks/main.yml | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename roles/{load_backup_manifest => fetch_backups}/tasks/main.yml (100%) diff --git a/roles/load_backup_manifest/tasks/main.yml b/roles/fetch_backups/tasks/main.yml similarity index 100% rename from roles/load_backup_manifest/tasks/main.yml rename to roles/fetch_backups/tasks/main.yml From 23ef172c914240aa1cc288a38c0a4602cda62963 Mon Sep 17 00:00:00 2001 From: Ben Partridge Date: Tue, 8 Oct 2019 10:15:16 +1100 Subject: [PATCH 28/93] JIRASERVER-66236: Set Jira collation, ctype to C, encoding to UNICODE --- aws_jira_dc_node.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/aws_jira_dc_node.yml b/aws_jira_dc_node.yml index f0af8a4..e95dbe3 100644 --- a/aws_jira_dc_node.yml +++ b/aws_jira_dc_node.yml @@ -12,6 +12,10 @@ atl_startup_systemd_params: - "LimitNOFILE=16384" + atl_jdbc_encoding: 'UNICODE' + atl_jdbc_collation: 'C' + atl_jdbc_ctype: 'C' + roles: - role: linux_common - role: aws_common From 9941a6ca2ca2bf751a6f9b031c55d1495a6c25b5 Mon Sep 17 00:00:00 2001 From: Ben Partridge Date: Tue, 8 Oct 2019 10:45:33 +1100 Subject: [PATCH 29/93] JIRASERVER-66236: use template0 as jira database template --- 
aws_jira_dc_node.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/aws_jira_dc_node.yml b/aws_jira_dc_node.yml index e95dbe3..41b7be6 100644 --- a/aws_jira_dc_node.yml +++ b/aws_jira_dc_node.yml @@ -15,6 +15,7 @@ atl_jdbc_encoding: 'UNICODE' atl_jdbc_collation: 'C' atl_jdbc_ctype: 'C' + atl_jdbc_template: 'template0' roles: - role: linux_common From 07e97ba35e7a92fdf2b5bd50966d474feb0675c1 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Wed, 9 Oct 2019 13:00:53 +1100 Subject: [PATCH 30/93] DCD-686: Fix some issues with packages on Amazon Linux. --- bin/ansible-with-atl-env | 3 + bin/install-ansible | 15 +- bin/pacapt | 2561 -------------------------------------- 3 files changed, 15 insertions(+), 2564 deletions(-) delete mode 100755 bin/pacapt diff --git a/bin/ansible-with-atl-env b/bin/ansible-with-atl-env index 29d5fee..3685381 100755 --- a/bin/ansible-with-atl-env +++ b/bin/ansible-with-atl-env @@ -8,6 +8,9 @@ PLAYBOOK=${2:?"Playbook must be specified"} LOG_FILE=${3:-"/dev/null"} ENV_FILE=${4:-"/etc/atl"} +export PATH=/usr/local/bin:$PATH + + # Set the environment with default exports set -a source $ENV_FILE diff --git a/bin/install-ansible b/bin/install-ansible index 4f3f81c..b515cdf 100755 --- a/bin/install-ansible +++ b/bin/install-ansible @@ -2,9 +2,18 @@ set -e -./bin/pacapt install --noconfirm \ - python3-dev \ - python3-pip +source /etc/os-release +if [[ $ID = "amzn" ]]; then + yum install -y \ + python3-devel \ + python3-pip +else + apt-get update && \ + apt-get install -y \ + python3-dev \ + python3-pip +fi +export PATH=/usr/local/bin:$PATH pip3 install pipenv pipenv sync diff --git a/bin/pacapt b/bin/pacapt deleted file mode 100755 index bf1b711..0000000 --- a/bin/pacapt +++ /dev/null @@ -1,2561 +0,0 @@ -#!/usr/bin/env bash -# -# Purpose: A wrapper for all Unix package managers -# License: Fair license (http://www.opensource.org/licenses/fair) -# Source : http://github.com/icy/pacapt/ -# Version: 2.4.2 -# Authors: Anh K. Huynh et al. 
- -# Copyright (C) 2010 - 2019 \ -# | 10sr (10sr) -# | Alexander Dupuy (dupuy) -# | Anh K. Huynh (icy) -# | Antony Lee (anntzer) -# | Alex Lyon (Arcterus) -# | Carl X. Su (bcbcarl) -# | Cuong Manh Le (Gnouc) -# | Daniel YC Lin (dlintw) -# | Danny George (dangets) -# | Darshit Shah (darnir) -# | Dmitry Kudriavtsev (dkudriavtsev) -# | Eric Crosson (EricCrosson) -# | Evan Relf (evanrelf) -# | GijsTimmers (GijsTimmers) -# | Hà-Dương Nguyễn (cmpitg) -# | Huy Ngô (NgoHuy) -# | James Pearson (xiongchiamiov) -# | Janne Heß (dasJ) -# | Jiawei Zhou (4679) -# | Karol Blazewicz -# | Kevin Brubeck (unhammer) -# | Konrad Borowski (xfix) -# | Kylie McClain (somasis) -# | Valerio Pizzi (Pival81) -# | Siôn Le Roux (sinisterstuf) -# | Thiago Perrotta (thiagowfx) -# | Vojtech Letal (letalvoj) -# -# Usage of the works is permitted provided that this instrument is -# retained with the works, so that any entity that uses the works is -# notified of this instrument. -# -# DISCLAIMER: THE WORKS ARE WITHOUT WARRANTY. -# - -_print_pacapt_version() { - cat <<_EOF_ -pacapt version '2.4.2' - -Copyright (C) 2010 - 2019 \\ - | 10sr (10sr) - | Alexander Dupuy (dupuy) - | Anh K. Huynh (icy) - | Antony Lee (anntzer) - | Alex Lyon (Arcterus) - | Carl X. 
Su (bcbcarl) - | Cuong Manh Le (Gnouc) - | Daniel YC Lin (dlintw) - | Danny George (dangets) - | Darshit Shah (darnir) - | Dmitry Kudriavtsev (dkudriavtsev) - | Eric Crosson (EricCrosson) - | Evan Relf (evanrelf) - | GijsTimmers (GijsTimmers) - | Hà-Dương Nguyễn (cmpitg) - | Huy Ngô (NgoHuy) - | James Pearson (xiongchiamiov) - | Janne Heß (dasJ) - | Jiawei Zhou (4679) - | Karol Blazewicz - | Kevin Brubeck (unhammer) - | Konrad Borowski (xfix) - | Kylie McClain (somasis) - | Valerio Pizzi (Pival81) - | Siôn Le Roux (sinisterstuf) - | Thiago Perrotta (thiagowfx) - | Vojtech Letal (letalvoj) - -Usage of the works is permitted provided that this -instrument is retained with the works, so that any -entity that uses the works is notified of this instrument. - -DISCLAIMER: THE WORKS ARE WITHOUT WARRANTY. -_EOF_ -} - -export PACAPT_VERSION='2.4.2' - -_help() { - cat <<'EOF' -NAME - pacapt - An `ArchLinux`'s pacman-like wrapper for many package managers. - -SYNTAX - - $ pacapt - -BASIC OPTIONS - - -h or --help print this help message - -P print supported operations - -V print version information - -SYSGET STYLE OPERATIONS - - update Update package database - upgrade Upgrade system - install Install some packages - search Search some package - remove Remove some packages - autoremove Remove orphans (WIP; may not work correctly) - clean Clean package manager caches - -PACMAN STYLE OPERATIONS - - Query - -Q list all installed packages - -Qc show package's changelog - -Qe [] only list explicitly installed packages - -Qi print package status - -Ql list package's files - -Qm list installed packages that aren't available - in any installation source - -Qo query package that provides - -Qp query a package file (don't use package database) - -Qs search for installed package - - Synchronize - -S install package(s) - -Sg list groups - -Sg list packages in group - -Ss search for packages - -Su upgrade the system - -Sy update package database - -Suy update package database, then upgrade 
the system - - Remove / Clean up - -R remove some packages - -Sc delete old downloaded packages - -Scc delete all downloaded packages - -Sccc clean variant files. - (debian) See also http://dragula.viettug.org/blogs/646 - - Upgrade - -U upgrade or add package from local file path (or remote uri) - -OPTIONS - - -w download packages but don't install them - --noconfirm don't wait for user's confirmation - -EXAMPLES - - 1. To install a package from Debian's backports repository - $ pacapt -S foobar -t lenny-backports - $ pacapt -S -- -t lenny-backports foobar - - 2. To update package database and then update your system - $ pacapt -Syu - - 3. To download a package without installing it - $ pacapt -Sw foobar - - -ENVIRONMENT - - PACAPT_DEBUG - - This is useful for debugging purpose. The variable can be set to `auto` - or any valid packager. For example, on `Debian` system the two following - commands are the same and they will print out what the script would do: - - PACAPT_DEBUG=auto pacman -Su - PACAPT_DEBUG=dpkg pacman -Su - -NOTES - - When being executed on Arch-based system, the tool simply invokes - the system package manager (`/usr/bin/pacman`). - - Though you can specify option by its own word, for example, - $ pacapt -S -y -u - - it's always the best to combine them - $ pacapt -Syu - -READMORE - - Please visit https://github.com/icy/pacapt. -EOF - -} - - - - -_error() { - echo >&2 "Error: $*" - return 1 -} - -_warn() { - echo >&2 "Warning: $*" - return 0 -} - -_die() { - echo >&2 "$@" - exit 1 -} - -_not_implemented() { - # shellcheck disable=2153 - echo >&2 "${_PACMAN}: '${_POPT}:${_SOPT}:${_TOPT}' operation is invalid or not implemented." - return 1 -} - -_removing_is_dangerous() { - echo >&2 "${_PACMAN}: removing with '$*' is too dangerous" - return 1 -} - -_issue2pacman() { - local _pacman - - _pacman="$1"; shift - - # The following line is added by Daniel YC Lin to support SunOS. 
- # - # [ `uname` = "$1" ] && _PACMAN="$_pacman" && return - # - # This is quite tricky and fast, however I don't think it works - # on Linux/BSD systems. To avoid extra check, I slightly modify - # the code to make sure it's only applicable on SunOS. - # - [[ "$(uname)" == "SunOS" ]] && _PACMAN="$_pacman" && return - - $GREP -qis "$@" /etc/issue \ - && _PACMAN="$_pacman" && return - - $GREP -qis "$@" /etc/os-release \ - && _PACMAN="$_pacman" && return -} - -_PACMAN_detect() { - _PACMAN_found_from_script_name && return - - _issue2pacman sun_tools "SunOS" && return - _issue2pacman pacman "Arch Linux" && return - _issue2pacman dpkg "Debian GNU/Linux" && return - _issue2pacman dpkg "Ubuntu" && return - _issue2pacman cave "Exherbo Linux" && return - _issue2pacman yum "CentOS" && return - _issue2pacman yum "Red Hat" && return - # - # FIXME: The multiple package issue. - # - # On #63, Huy commented out this line. This is because new generation - # of Fedora uses `dnf`, and `yum` becomes a legacy tool. On old Fedora - # system, `yum` is still detectable by looking up `yum` binary. - # - # I'm not sure how to support this case easily. Let's wait, e.g, 5 years - # from now to make `dnf` becomes a default? Oh no! - # - # And here why `pacman` is still smart. Debian has a set of tools. - # Fedora has `yum` (and a set of add-ons). Now Fedora moves to `dnf`. - # This means that a package manager is not a heart of a system ;) - # - # _issue2pacman yum "Fedora" && return - _issue2pacman zypper "SUSE" && return - _issue2pacman pkg_tools "OpenBSD" && return - _issue2pacman pkg_tools "Bitrig" && return - _issue2pacman apk "Alpine Linux" && return - - [[ -z "$_PACMAN" ]] || return - - # Prevent a loop when this script is installed on non-standard system - if [[ -x "/usr/bin/pacman" ]]; then - $GREP -q "${FUNCNAME[0]}" '/usr/bin/pacman' >/dev/null 2>&1 - [[ $? 
-ge 1 ]] && _PACMAN="pacman" \ - && return - fi - - [[ -x "/usr/bin/apt-get" ]] && _PACMAN="dpkg" && return - [[ -x "/data/data/com.termux/files/usr/bin/apt-get" ]] && _PACMAN="dpkg" && return - [[ -x "/usr/bin/cave" ]] && _PACMAN="cave" && return - [[ -x "/usr/bin/dnf" ]] && _PACMAN="dnf" && return - [[ -x "/usr/bin/yum" ]] && _PACMAN="yum" && return - [[ -x "/opt/local/bin/port" ]] && _PACMAN="macports" && return - [[ -x "/usr/bin/emerge" ]] && _PACMAN="portage" && return - [[ -x "/usr/bin/zypper" ]] && _PACMAN="zypper" && return - [[ -x "/usr/sbin/pkg" ]] && _PACMAN="pkgng" && return - # make sure pkg_add is after pkgng, FreeBSD base comes with it until converted - [[ -x "/usr/sbin/pkg_add" ]] && _PACMAN="pkg_tools" && return - [[ -x "/usr/sbin/pkgadd" ]] && _PACMAN="sun_tools" && return - [[ -x "/sbin/apk" ]] && _PACMAN="apk" && return - [[ -x "/usr/bin/tazpkg" ]] && _PACMAN="tazpkg" && return - [[ -x "/usr/bin/swupd" ]] && _PACMAN="swupd" && return - - command -v brew >/dev/null && _PACMAN="homebrew" && return - - return 1 -} - -_translate_w() { - - echo "$_EOPT" | $GREP -q ":w:" || return 0 - - local _opt= - local _ret=0 - - case "$_PACMAN" in - "dpkg") _opt="-d";; - "cave") _opt="-f";; - "macports") _opt="fetch";; - "portage") _opt="--fetchonly";; - "zypper") _opt="--download-only";; - "pkgng") _opt="fetch";; - "yum") _opt="--downloadonly"; - if ! rpm -q 'yum-downloadonly' >/dev/null 2>&1; then - _error "'yum-downloadonly' package is required when '-w' is used." - _ret=1 - fi - ;; - "tazpkg") - _error "$_PACMAN: Use '$_PACMAN get' to download and save packages to current directory." - _ret=1 - ;; - "apk") _opt="fetch";; - *) - _opt="" - _ret=1 - - _error "$_PACMAN: Option '-w' is not supported/implemented." 
- ;; - esac - - echo $_opt - return "$_ret" -} - -_translate_debug() { - echo "$_EOPT" | $GREP -q ":v:" || return 0 - - case "$_PACMAN" in - "tazpkg") - _error "$_PACMAN: Option '-v' (debug) is not supported/implemented by tazpkg" - return 1 - ;; - esac - - echo "-v" -} - -_translate_noconfirm() { - - echo "$_EOPT" | $GREP -q ":noconfirm:" || return 0 - - local _opt= - local _ret=0 - - case "$_PACMAN" in - # FIXME: Update environment DEBIAN_FRONTEND=noninteractive - # FIXME: There is also --force-yes for a stronger case - "dpkg") _opt="--yes";; - "dnf") _opt="--assumeyes";; - "yum") _opt="--assumeyes";; - # FIXME: pacman has 'assume-yes' and 'assume-no' - # FIXME: zypper has better mode. Similar to dpkg (Debian). - "zypper") _opt="--no-confirm";; - "pkgng") _opt="-y";; - "tazpkg") _opt="--auto";; - *) - _opt="" - _ret=1 - _error "$_PACMAN: Option '--noconfirm' is not supported/implemented." - ;; - esac - - echo $_opt - return $_ret -} - -_translate_all() { - local _args="" - local _debug= - local _noconfirm= - - _debug="$(_translate_debug)" - _noconfirm="$(_translate_noconfirm)" - _args="$(_translate_w)" || return 1 - _args="${_args}${_noconfirm:+ }${_noconfirm}" || return 1 - _args="${_args}${_debug:+ }${_debug}" || return 1 - - export _EOPT="${_args# }" -} - -_print_supported_operations() { - local _pacman="$1" - echo -n "pacapt($_pacman): available operations:" - # shellcheck disable=2016 - $GREP -E "^${_pacman}_[^ \\t]+\\(\\)" "$0" \ - | $AWK -F '(' '{print $1}' \ - | sed -e "s/${_pacman}_//g" \ - | while read -r O; do - echo -n " $O" - done - echo -} - - -export _SUPPORTED_EXTERNALS=" - :conda - :tlmgr - :texlive - :gem - :npm - :pip -" -readonly _SUPPORTED_EXTERNALS - -_PACMAN_found_from_script_name() { - local _tmp_name= - local _pacman= - - _tmp_name="${BASH_SOURCE[0]:-?}" - if [[ "$_tmp_name" == "?" ]]; then - _error "Unable to get script name." 
- return 1 - fi - - _tmp_name="${_tmp_name##*/}" # base name (remove everything before the last `/`) - _tmp_name="${_tmp_name%.*}" # remove extension if any (remove everything from the last `.`) - _pacman="${_tmp_name##*-}" # remove every thing before the last `-` - - if grep -Eq -e ":$_pacman[[:space:]]*" <<< "$_SUPPORTED_EXTERNALS"; then - export _PACMAN="$_pacman" - return 0 - else - export _PACMAN="" - return 1 - fi -} - - - -_apk_init() { - : -} - -apk_Q() { - if [[ -z "$_TOPT" ]]; then - apk info - else - _not_implemented - fi -} - -apk_Qi() { - apk info -a -- "$@" -} - -apk_Ql() { - apk info -L -- "$@" -} - -apk_Qo() { - apk info --who-owns -- "$@" -} - -apk_Qs() { - apk info -- "*${*}*" -} - -apk_Qu() { - apk version -l '<' -} - -apk_R() { - apk del -- "$@" -} - -apk_Rn() { - apk del --purge -- "$@" -} - -apk_Rns() { - apk del --purge -r -- "$@" -} - -apk_Rs() { - apk del -r -- "$@" -} - -apk_S() { - case ${_EOPT} in - # Download only - ("fetch") shift - apk fetch -- "$@" ;; - (*) apk add $_TOPT -- "$@" ;; - esac -} - -apk_Sc() { - apk cache -v clean -} - -apk_Scc() { - rm -rf /var/cache/apk/* -} - -apk_Sccc() { - apk_Scc -} - -apk_Si() { - apk_Qi "$@" -} - -apk_Sii() { - apk info -r -- "$@" -} - -apk_Sl() { - apk search -v -- "$@" -} - -apk_Ss() { - apk_Sl "$@" -} - -apk_Su() { - apk upgrade -} - -apk_Suy() { - if [ "$#" -gt 0 ]; then - apk add -U -u -- "$@" - else - apk upgrade -U -a - fi -} - -apk_Sy() { - apk update -} - -apk_Sw() { - apk fetch -- "$@" -} - -apk_U() { - apk add --allow-untrusted -- "$@" -} - - - -_cave_init() { - shopt -u globstar -} - -cave_Q() { - if [[ "$_TOPT" == "q" ]]; then - cave show -f "${@:-world}" \ - | grep -v '^$' - else - cave show -f "${@:-world}" - fi -} - -cave_Qi() { - cave show "$@" -} - -cave_Ql() { - if [[ -n "$*" ]]; then - cave contents "$@" - return - fi - - cave show -f "${@:-world}" \ - | grep -v '^$' \ - | while read -r _pkg; do - if [[ "$_TOPT" == "q" ]]; then - cave --color no contents "$_pkg" - else - cave 
contents "$_pkg" - fi - done -} - -cave_Qo() { - cave owner "$@" -} - -cave_Qp() { - _not_implemented -} - -cave_Qu() { - if [[ -z "$*" ]];then - cave resolve -c world \ - | grep '^u.*' \ - | while read -r _pkg; do - echo "$_pkg" | cut -d'u' -f2- - done - else - cave resolve -c world \ - | grep '^u.*' \ - | grep -- "$@" - fi -} - -cave_Qs() { - cave show -f world | grep -- "$@" -} - -cave_Rs() { - if [[ "$_TOPT" == "" ]]; then - cave uninstall -r "$@" \ - && echo "Control-C to stop uninstalling..." \ - && sleep 2s \ - && cave uninstall -xr "$@" - else - cave purge "$@" \ - && echo "Control-C to stop uninstalling (+ dependencies)..." \ - && sleep 2s \ - && cave purge -x "$@" - fi -} - -cave_Rn() { - _not_implemented -} - -cave_Rns() { - _not_implemented -} - -cave_R() { - cave uninstall "$@" \ - && echo "Control-C to stop uninstalling..." \ - && sleep 2s \ - && cave uninstall -x "$@" -} - -cave_Si() { - cave show "$@" -} - -cave_Suy() { - cave sync && cave resolve -c "${@:-world}" \ - && echo "Control-C to stop upgrading..." \ - && sleep 2s \ - && cave resolve -cx "${@:-world}" -} - -cave_Su() { - cave resolve -c "$@" \ - && echo "Control-C to stop upgrading..." \ - && sleep 2s \ - && cave resolve -cx "$@" -} - -cave_Sy() { - cave sync "$@" -} - -cave_Ss() { - cave search "$@" -} - -cave_Sc() { - cave fix-cache "$@" -} - -cave_Scc() { - cave fix-cache "$@" -} - -cave_Sccc() { - #rm -fv /var/cache/paludis/* - _not_implemented -} - -cave_S() { - cave resolve $_TOPT "$@" \ - && echo "Control-C to stop installing..." 
\ - && sleep 2s \ - && cave resolve -x $_TOPT "$@" -} - -cave_U() { - _not_implemented -} - - - -_conda_init() { - : -} - -conda_Q() { - if [[ $# -gt 0 ]]; then - conda list "$(python -c 'import sys; print("^" + "|".join(sys.argv[1:]) + "$")' "$@")" - else - conda list - fi -} - -conda_R() { - conda remove "$@" -} - -conda_S() { - conda install "$@" -} - -conda_Sc() { - conda clean --all "$@" -} - -conda_Si() { - conda search "$@" --info -} - -conda_Ss() { - conda search "*$@*" -} - -conda_Suy() { - conda update --all "$@" -} - - - - -_dnf_init() { - : -} - -dnf_S() { - dnf install $_TOPT "$@" -} - -dnf_Sc() { - dnf clean expire-cache "$@" -} - -dnf_Scc() { - dnf clean packages "$@" -} - -dnf_Sccc() { - dnf clean all "$@" -} - -dnf_Si() { - dnf info "$@" -} - -dnf_Sg() { - if [[ $# -gt 0 ]]; then - dnf group info "$@" - else - dnf group list - fi -} - -dnf_Sl() { - dnf list available "$@" -} - -dnf_Ss() { - dnf search "$@" -} - -dnf_Su() { - dnf upgrade "$@" -} - -dnf_Suy() { - dnf upgrade "$@" -} - -dnf_Sw() { - dnf download "$@" -} - -dnf_Sy() { - dnf clean expire-cache && dnf check-update -} - -dnf_Q() { - if [[ "$_TOPT" == "q" ]]; then - rpm -qa --qf "%{NAME}\\n" - elif [[ "$_TOPT" == "" ]]; then - rpm -qa --qf "%{NAME} %{VERSION}\\n" - else - _not_implemented - fi -} - -dnf_Qc() { - rpm -q --changelog "$@" -} - -dnf_Qe() { - dnf repoquery --userinstalled "$@" -} - -dnf_Qi() { - dnf info "$@" -} - -dnf_Ql() { - rpm -ql "$@" -} - -dnf_Qm() { - dnf list extras -} - -dnf_Qo() { - rpm -qf "$@" -} - -dnf_Qp() { - rpm -qp "$@" -} - -dnf_Qs() { - rpm -qa "*${*}*" -} - -dnf_Qu() { - dnf list updates "$@" -} - -dnf_R() { - dnf remove "$@" -} - -dnf_U() { - dnf install "$@" -} - - - -_dpkg_init() { - : -} - -dpkg_Q() { - if [[ "$_TOPT" == "q" ]]; then - dpkg -l \ - | grep -E '^[hi]i' \ - | awk '{print $2}' - elif [[ "$_TOPT" == "" ]]; then - dpkg -l "$@" \ - | grep -E '^[hi]i' - else - _not_implemented - fi -} - -dpkg_Qi() { - dpkg-query -s "$@" -} - -dpkg_Ql() { - if [[ 
-n "$*" ]]; then - dpkg-query -L "$@" - return - fi - - dpkg -l \ - | grep -E '^[hi]i' \ - | awk '{print $2}' \ - | while read -r _pkg; do - if [[ "$_TOPT" == "q" ]]; then - dpkg-query -L "$_pkg" - else - dpkg-query -L "$_pkg" \ - | while read -r _line; do - echo "$_pkg $_line" - done - fi - done -} - -dpkg_Qo() { - dpkg-query -S "$@" -} - -dpkg_Qp() { - dpkg-deb -I "$@" -} - -dpkg_Qu() { - apt-get upgrade --trivial-only "$@" -} - -dpkg_Qs() { - # dpkg >= 1.16.2 dpkg-query -W -f='${db:Status-Abbrev} ${binary:Package}\t${Version}\t${binary:Summary}\n' - dpkg-query -W -f='${Status} ${Package}\t${Version}\t${Description}\n' \ - | grep -E '^((hold)|(install)|(deinstall))' \ - | sed -r -e 's#^(\w+ ){3}##g' \ - | grep -Ei "${@:-.}" -} - -dpkg_Rs() { - if [[ "$_TOPT" == "" ]]; then - apt-get autoremove "$@" - else - _not_implemented - fi -} - -dpkg_Rn() { - apt-get purge "$@" -} - -dpkg_Rns() { - apt-get --purge autoremove "$@" -} - -dpkg_R() { - apt-get remove "$@" -} - -dpkg_Si() { - apt-cache show "$@" -} - -dpkg_Suy() { - apt-get update \ - && apt-get upgrade "$@" \ - && apt-get dist-upgrade "$@" -} - -dpkg_Su() { - apt-get upgrade "$@" \ - && apt-get dist-upgrade "$@" -} - - -dpkg_Sy() { - apt-get update "$@" -} - -dpkg_Ss() { - apt-cache search "$@" -} - -dpkg_Sc() { - apt-get clean "$@" -} - -dpkg_Scc() { - apt-get autoclean "$@" -} - -dpkg_S() { - apt-get install $_TOPT "$@" -} - -dpkg_U() { - dpkg -i "$@" -} - -dpkg_Sii() { - apt-cache rdepends "$@" -} - -dpkg_Sccc() { - rm -fv /var/cache/apt/*.bin - rm -fv /var/cache/apt/archives/*.* - rm -fv /var/lib/apt/lists/*.* - apt-get autoclean -} - - - -_homebrew_init() { - : -} - -homebrew_Qi() { - brew info "$@" -} - -homebrew_Ql() { - brew list "$@" -} - -homebrew_Qo() { - local pkg prefix cellar - - # FIXME: What happens if the file is not exectutable? 
- cd "$(dirname -- "$(which "$@")")" || return - pkg="$(pwd -P)/$(basename -- "$@")" - prefix="$(brew --prefix)" - cellar="$(brew --cellar)" - - for package in $cellar/*; do - files=(${package}/*/${pkg/#$prefix\//}) - if [[ -e "${files[${#files[@]} - 1]}" ]]; then - echo "${package/#$cellar\//}" - break - fi - done -} - -homebrew_Qc() { - brew log "$@" -} - -homebrew_Qu() { - brew outdated | grep "$@" -} - -homebrew_Qs() { - brew list | grep "$@" -} - -homebrew_Q() { - if [[ "$_TOPT" == "" ]]; then - if [[ "$*" == "" ]]; then - brew list - else - brew list | grep "$@" - fi - else - _not_implemented - fi -} - -homebrew_Rs() { - which join > /dev/null - if [ $? -ne 0 ]; then - _die "pacapt: join binary does not exist in system." - fi - - which sort > /dev/null - if [ $? -ne 0 ]; then - _die "pacapt: sort binary does not exist in system." - fi - - if [[ "$@" == "" ]]; then - _die "pacapt: ${FUNCNAME[0]} requires arguments" - fi - - for _target in $@; - do - brew rm $_target - - while [ "$(join <(sort <(brew leaves)) <(sort <(brew deps $_target)))" != "" ] - do - brew rm $(join <(sort <(brew leaves)) <(sort <(brew deps $_target))) - done - done - -} - -homebrew_R() { - brew remove "$@" -} - -homebrew_Si() { - brew info "$@" -} - -homebrew_Suy() { - brew update \ - && brew upgrade "$@" -} - -homebrew_Su() { - brew upgrade "$@" -} - -homebrew_Sy() { - brew update "$@" -} - -homebrew_Ss() { - brew search "$@" -} - -homebrew_Sc() { - brew cleanup "$@" -} - -homebrew_Scc() { - brew cleanup -s "$@" -} - -homebrew_Sccc() { - # See more discussion in - # https://github.com/icy/pacapt/issues/47 - - local _dcache - - _dcache="$(brew --cache)" - case "$_dcache" in - ""|"/"|" ") - _error "${FUNCNAME[0]}: Unable to delete '$_dcache'." - ;; - - *) - # FIXME: This is quite stupid!!! But it's an easy way - # FIXME: to avoid some warning from #shellcheck. - # FIXME: Please note that, $_dcache is not empty now. 
- rm -rf "${_dcache:-/x/x/x/x/x/x/x/x/x/x/x//x/x/x/x/x/}/" - ;; - esac -} - -homebrew_S() { - brew install $_TOPT "$@" -} - - - -_macports_init() { - : -} - -macports_Ql() { - port contents "$@" -} - -macports_Qo() { - port provides "$@" -} - -macports_Qc() { - port log "$@" -} - -macports_Qu() { - port outdated "$@" -} - -macports_Rs() { - if [[ "$_TOPT" == "" ]]; then - port uninstall --follow-dependencies "$@" - else - _not_implemented - fi -} - -macports_R() { - port uninstall "$@" -} - -macports_Si() { - port info "$@" -} - -macports_Suy() { - port selfupdate \ - && port upgrade outdated "$@" -} - -macports_Su() { - port upgrade outdate "$@" -} - -macports_Sy() { - port selfupdate "$@" -} - -macports_Ss() { - port search "$@" -} - -macports_Sc() { - port clean --all inactive "$@" -} - -macports_Scc() { - port clean --all installed "$@" -} - -macports_S() { - if [[ "$_TOPT" == "fetch" ]]; then - port patch "$@" - else - port install "$@" - fi -} - - - -_pkgng_init() { - : -} - -pkgng_Qi() { - pkg info "$@" -} - -pkgng_Ql() { - pkg info -l "$@" -} - -pkgng_Qo() { - pkg which "$@" -} - -pkgng_Qp() { - pkg query -F "$@" '%n %v' -} - -pkgng_Qu() { - pkg upgrade -n "$@" -} - -pkgng_Q() { - if [[ "$_TOPT" == "q" ]]; then - pkg query '%n' "$@" - elif [[ "$_TOPT" == "" ]]; then - pkg query '%n %v' "$@" - else - _not_implemented - fi -} - -pkgng_Rs() { - if [[ "$_TOPT" == "" ]]; then - pkg remove "$@" - pkg autoremove - else - _not_implemented - fi -} - -pkgng_R() { - pkg remove "$@" -} - -pkgng_Si() { - pkg search -S name -ef "$@" -} - -pkgng_Suy() { - pkg upgrade "$@" -} - -pkgng_Su() { - pkg upgrade -U "$@" -} - -pkgng_Sy() { - pkg update "$@" -} - -pkgng_Ss() { - pkg search "$@" -} - -pkgng_Sc() { - pkg clean "$@" -} - -pkgng_Scc() { - pkg clean -a "$@" -} - -pkgng_S() { - if [[ "$_TOPT" == "fetch" ]]; then - pkg fetch "$@" - else - pkg install "$@" - fi -} - - - -_pkg_tools_init() { - : -} - -pkg_tools_Qi() { - # disable searching mirrors for packages - export 
PKG_PATH= - pkg_info "$@" -} - -pkg_tools_Ql() { - export PKG_PATH= - pkg_info -L "$@" -} - -pkg_tools_Qo() { - export PKG_PATH= - pkg_info -E "$@" -} - -pkg_tools_Qp() { - _not_implemented -} - -pkg_tools_Qu() { - export PKG_PATH= - pkg_add -u "$@" -} - -pkg_tools_Q() { - export PKG_PATH= - # the dash after the pkg name is so we don't catch partial matches - # because all packages in openbsd have the format 'pkgname-pkgver' - if [[ "$_TOPT" == "q" && ! -z "$*" ]]; then - pkg_info -q | grep "^${*}-" - elif [[ "$_TOPT" == "q" && -z "$*" ]];then - pkg_info -q - elif [[ "$_TOPT" == "" && ! -z "$*" ]]; then - pkg_info | grep "^${*}-" - elif [[ "$_TOPT" == "" && -z "$*" ]];then - pkg_info - else - _not_implemented - fi -} - -pkg_tools_Rs() { - if [[ "$_TOPT" == "" ]]; then - pkg_delete -D dependencies "$@" - else - _not_implemented - fi -} - -pkg_tools_Rn() { - if [[ "$_TOPT" == "" ]];then - pkg_delete -c "$@" - else - _not_implemented - fi -} - -pkg_tools_Rns() { - _not_implemented -} - -pkg_tools_R() { - pkg_delete "$@" -} - -pkg_tools_Si() { - pkg_info "$@" -} - -pkg_tools_Sl() { - pkg_info -L "$@" -} - -pkg_tools_Suy() { - # pkg_tools doesn't really have any concept of a database - # there's actually not really any database to update, so - # this function is mostly just for convienience since on arch - # doing -Su is normally a bad thing to do since it's a partial upgrade - - pkg_tools_Su "$@" -} - -pkg_tools_Su() { - pkg_add -u "$@" -} - -pkg_tools_Sy() { - _not_implemented -} - -pkg_tools_Ss() { - if [[ -z "$*" ]];then - _not_implemented - else - pkg_info -Q "$@" - fi -} - -pkg_tools_Sc() { - # by default no cache directory is used - if [[ -z "$PKG_CACHE" ]];then - echo "You have no cache directory set, set \$PKG_CACHE for a cache directory." - elif [[ ! -d "$PKG_CACHE" ]];then - echo "You have a cache directory set, but it does not exist. Create \"$PKG_CACHE\"." 
- else - _removing_is_dangerous "rm -rf $PKG_CACHE/*" - fi -} - -pkg_tools_Scc() { - _not_implemented -} - -pkg_tools_S() { - pkg_add "$@" -} - - - -_portage_init() { - : -} - -portage_Qi() { - emerge --info "$@" -} - -portage_Ql() { - if [[ -x '/usr/bin/qlist' ]]; then - qlist "$@" - elif [[ -x '/usr/bin/equery' ]]; then - equery files "$@" - else - _error "'portage-utils' or 'gentoolkit' package is required to perform this opreation." - fi -} - -portage_Qo() { - if [[ -x '/usr/bin/equery' ]]; then - equery belongs "$@" - else - _error "'gentoolkit' package is required to perform this operation." - fi -} - -portage_Qc() { - emerge -p --changelog "$@" -} - -portage_Qu() { - emerge -uvN "$@" -} - -portage_Q() { - if [[ "$_TOPT" == "" ]]; then - if [[ -x '/usr/bin/eix' ]]; then - eix -I "$@" - elif [[ -x '/usr/bin/equery' ]]; then - equery list -i "$@" - else - LS_COLORS=never \ - ls -1 -d /var/db/pkg/*/* - fi - else - _not_implemented - fi -} - -portage_Rs() { - if [[ "$_TOPT" == "" ]]; then - emerge --depclean world "$@" - else - _not_implemented - fi -} - -portage_R() { - emerge --depclean "@" -} - -portage_Si() { - emerge --info "$@" -} - -portage_Suy() { - if [[ -x '/usr/bin/layman' ]]; then - layman --sync-all \ - && emerge --sync \ - && emerge -auND world "$@" - else - emerge --sync \ - && emerge -uND world "$@" - fi -} - -portage_Su() { - emerge -uND world "$@" -} - -portage_Sy() { - if [[ -x "/usr/bin/layman" ]]; then - layman --sync-all \ - && emerge --sync "$@" - else - emerge --sync "$@" - fi -} - -portage_Ss() { - if [[ -x "/usr/bin/eix" ]]; then - eix "$@" - else - emerge --search "$@" - fi -} - -portage_Sc() { - if [[ -x "/usr/bin/eclean-dist" ]]; then - eclean-dist -d -t1m -s50 -f "$@" - else - _error "'gentoolkit' package is required to perform this operation." - fi -} - -portage_Scc() { - if [[ -x "/usr/bin/eclean" ]]; then - eclean -i distfiles "$@" - else - _error "'gentoolkit' package is required to perform this operation." 
- fi -} - -portage_Sccc() { - rm -fv /usr/portage/distfiles/*.* -} - -portage_S() { - emerge "$@" -} - - - -_sun_tools_init() { - # The purpose of `if` is to make sure this function - # can be invoked on other system (Linux, BSD). - if [[ "$(uname)" == "SunOS" ]]; then - export GREP=/usr/xpg4/bin/grep - export AWK=nawk - fi -} - -sun_tools_Qi() { - pkginfo -l "$@" -} - -sun_tools_Ql() { - pkginfo -l "$@" -} - -sun_tools_Qo() { - $GREP "$@" /var/sadm/install/contents -} - -sun_tools_Qs() { - pkginfo | $GREP -i "$@" -} - -sun_tools_Q() { - # the dash after the pkg name is so we don't catch partial matches - # because all packages in openbsd have the format 'pkgname-pkgver' - if [[ "$_TOPT" == "q" && ! -z "$*" ]]; then - pkginfo | $GREP "$@" - elif [[ "$_TOPT" == "q" && -z "$*" ]]; then - pkginfo - else - pkginfo "$@" - fi -} - -sun_tools_R() { - pkgrm "$@" -} - -sun_tools_U() { - pkgadd "$@" -} - - - -_swupd_init() { - : -} - -swupd_Qk() { - swupd verify "$@" -} - -swupd_Qo() { - swupd search "$@" -} - -swupd_Qs() { - swupd search "$@" -} - -swupd_R() { - swupd bundle-remove "$@" -} - -swupd_Suy() { - swupd update -} - -swupd_Su() { - swupd update -} - -swupd_Sy() { - swupd search -i - swupd update -} - -swupd_Ss() { - swupd search "$@" -} - -swupd_S() { - swupd bundle-add "$@" -} - - -_tazpkg_init() { - : -} - -tazpkg_Q() { - if [[ "$_TOPT" == "q" ]]; then - tazpkg list "$@" \ - | awk '{ if (NF == 2 || NF == 3) { print $1; }}' - elif [[ "$_TOPT" == "" ]]; then - tazpkg list "$@" - else - _not_implemented - fi -} - -tazpkg_Qi() { - tazpkg info "$@" -} - -tazpkg_Ql() { - if [[ -z "$*" ]]; then - _not_implemented - return - fi - - if [[ "$_TOPT" == "q" ]]; then - { - tazpkg list-files "$@" - tazpkg list-config "$@" - } \ - | grep ^/ - else - tazpkg list-files "$@" - tazpkg list-config "$@" - fi -} - -tazpkg_Sy() { - tazpkg recharge -} - -tazpkg_Su() { - tazpkg up -} - -tazpkg_Suy() { - tazpkg_Sy \ - && tazpkg_Su -} - -tazpkg_S() { - local _forced="" - - if grep -q -- 
"--forced" <<<"$*"; then - _forced="--forced" - fi - - while (( $# )); do - if [[ "$1" == "--forced" ]]; then - _forced="--forced" - shift - continue - fi - - tazpkg get-install "$1" $_forced - shift - done -} - -tazpkg_R() { - local _auto="" - - if grep -q -- "--auto" <<<"$*"; then - _auto="--auto" - fi - - while (( $# )); do - if [[ "$1" == "--auto" ]]; then - _auto="--auto" - shift - continue - fi - - tazpkg remove "$1" $_auto - shift - done -} - -tazpkg_Sc() { - tazpkg clean-cache -} - -tazpkg_Scc() { - tazpkg clean-cache - cd /var/lib/tazpkg/ \ - && { - rm -fv \ - ./*.bak \ - ID \ - packages.* \ - files.list.* - } -} - -tazpkg_Ss() { - tazpkg search "$@" -} - -tazpkg_Qo() { - tazpkg search-pkgname "$@" -} - -tazpkg_U() { - local _forced="" - - if grep -q -- "--forced" <<<"$*"; then - _forced="--forced" - fi - - while (( $# )); do - if [[ "$1" == "--forced" ]]; then - _forced="--forced" - shift - continue - fi - - tazpkg install "$1" $_forced - shift - done -} - - - -_tlmgr_init() { - : -} - -tlmgr_Qi() { - tlmgr info --only-installed "$@" -} - -tlmgr_Qk() { - tlmgr check files -} - -tlmgr_Ql() { - tlmgr info --only-installed --list "$@" -} - -tlmgr_R() { - tlmgr remove "$@" -} - -tlmgr_S() { - tlmgr install "$@" -} - -tlmgr_Si() { - tlmgr info "$@" -} - -tlmgr_Sl() { - tlmgr info -} - -tlmgr_Ss() { - tlmgr search --global "$@" -} - -tlmgr_Suy() { - tlmgr update --all -} - -tlmgr_U() { - tlmgr install --file "$@" -} - - - -_yum_init() { - : -} - -yum_Q() { - if [[ "$_TOPT" == "q" ]]; then - rpm -qa --qf "%{NAME}\\n" - elif [[ "$_TOPT" == "" ]]; then - rpm -qa --qf "%{NAME} %{VERSION}\\n" - else - _not_implemented - fi -} - -yum_Qi() { - yum info "$@" -} - -yum_Qs() { - rpm -qa "*${*}*" -} - -yum_Ql() { - rpm -ql "$@" -} - -yum_Qo() { - rpm -qf "$@" -} - -yum_Qp() { - rpm -qp "$@" -} - -yum_Qc() { - rpm -q --changelog "$@" -} - -yum_Qu() { - yum list updates "$@" -} - -yum_Qm() { - yum list extras "$@" -} - -yum_Rs() { - if [[ "$_TOPT" == "" ]]; then - yum erase 
"$@" - else - _not_implemented - fi -} - -yum_R() { - yum erase "$@" -} - -yum_Si() { - yum info "$@" -} - -yum_Suy() { - yum update "$@" -} - -yum_Su() { - yum update "$@" -} - -yum_Sy() { - yum check-update "$@" -} - -yum_Ss() { - yum -C search "$@" -} - -yum_Sc() { - yum clean expire-cache "$@" -} - -yum_Scc() { - yum clean packages "$@" -} - -yum_Sccc() { - yum clean all "$@" -} - -yum_S() { - yum install $_TOPT "$@" -} - -yum_U() { - yum localinstall "$@" -} - -yum_Sii() { - yum resolvedep "$@" -} - - - -_zypper_init() { - : -} - -zypper_Qc() { - rpm -q --changelog "$@" -} - -zypper_Qi() { - zypper info "$@" -} - -zypper_Ql() { - rpm -ql "$@" -} - -zypper_Qu() { - zypper list-updates "$@" -} - -zypper_Qm() { - zypper search -si "$@" \ - | grep 'System Packages' -} - -zypper_Qo() { - rpm -qf "$@" -} - -zypper_Qp() { - rpm -qip "$@" -} - -zypper_Qs() { - zypper search --installed-only "$@" -} - -zypper_Q() { - if [[ "$_TOPT" == "q" ]]; then - zypper search -i "$@" \ - | grep ^i \ - | awk '{print $3}' - elif [[ "$_TOPT" == "" ]]; then - zypper search -i "$@" - else - _not_implemented - fi -} - -zypper_Rs() { - if [[ "$_TOPT" == "s" ]]; then - zypper remove "$@" --clean-deps - else - _not_implemented - fi -} - -zypper_R() { - zypper remove "$@" -} - -zypper_Rn() { - # Remove configuration files - while read -r file; do - if [[ -f "$file" ]]; then - rm -fv "$file" - fi - done < <(rpm -ql "$@") - - # Now remove the package per-se - zypper remove "$@" -} - -zypper_Rs() { - if [[ "$_TOPT" == "s" ]]; then - zypper remove "$@" --clean-deps - else - _not_implemented - fi -} - -zypper_Rns() { - # Remove configuration files - while read -r file; do - if [[ -f "$file" ]]; then - rm -fv "$file" - fi - done < <(rpm -ql "$@") - - zypper remove "$@" --clean-deps -} - -zypper_Suy() { - zypper dup "$@" -} - -zypper_Sy() { - zypper refresh "$@" -} - -zypper_Sl() { - if [[ $# -eq 0 ]]; then - zypper pa -R - else - zypper pa -r "$@" - fi -} - -zypper_Ss() { - zypper search "$@" -} - 
-zypper_Su() { - zypper --no-refresh dup "$@" -} - -zypper_Sc() { - zypper clean "$@" -} - -zypper_Scc() { - zypper clean "$@" -} - -zypper_Sccc() { - # Not way to do this in zypper - _not_implemented -} - -zypper_Si() { - zypper info --requires "$@" -} - -zypper_Sii() { - # Ugly and slow, but does the trick - local packages= - - packages="$(zypper pa -R | cut -d \| -f 3 | tr -s '\n' ' ')" - for package in $packages; do - zypper info --requires "$package" \ - | grep -q "$@" && echo $package - done -} - -zypper_S() { - zypper install $_TOPT "$@" -} - -zypper_Sw() { - zypper install --download-only "$@" -} - -zypper_U() { - zypper install "$@" -} -_validate_operation() { - case "$1" in - "apk_Q") ;; - "apk_Qi") ;; - "apk_Ql") ;; - "apk_Qo") ;; - "apk_Qs") ;; - "apk_Qu") ;; - "apk_R") ;; - "apk_Rn") ;; - "apk_Rns") ;; - "apk_Rs") ;; - "apk_S") ;; - "apk_Sc") ;; - "apk_Scc") ;; - "apk_Sccc") ;; - "apk_Si") ;; - "apk_Sii") ;; - "apk_Sl") ;; - "apk_Ss") ;; - "apk_Su") ;; - "apk_Suy") ;; - "apk_Sy") ;; - "apk_Sw") ;; - "apk_U") ;; - "cave_Q") ;; - "cave_Qi") ;; - "cave_Ql") ;; - "cave_Qo") ;; - "cave_Qp") ;; - "cave_Qu") ;; - "cave_Qs") ;; - "cave_Rs") ;; - "cave_Rn") ;; - "cave_Rns") ;; - "cave_R") ;; - "cave_Si") ;; - "cave_Suy") ;; - "cave_Su") ;; - "cave_Sy") ;; - "cave_Ss") ;; - "cave_Sc") ;; - "cave_Scc") ;; - "cave_Sccc") ;; - "cave_S") ;; - "cave_U") ;; - "conda_Q") ;; - "conda_R") ;; - "conda_S") ;; - "conda_Sc") ;; - "conda_Si") ;; - "conda_Ss") ;; - "conda_Suy") ;; - "dnf_S") ;; - "dnf_Sc") ;; - "dnf_Scc") ;; - "dnf_Sccc") ;; - "dnf_Si") ;; - "dnf_Sg") ;; - "dnf_Sl") ;; - "dnf_Ss") ;; - "dnf_Su") ;; - "dnf_Suy") ;; - "dnf_Sw") ;; - "dnf_Sy") ;; - "dnf_Q") ;; - "dnf_Qc") ;; - "dnf_Qe") ;; - "dnf_Qi") ;; - "dnf_Ql") ;; - "dnf_Qm") ;; - "dnf_Qo") ;; - "dnf_Qp") ;; - "dnf_Qs") ;; - "dnf_Qu") ;; - "dnf_R") ;; - "dnf_U") ;; - "dpkg_Q") ;; - "dpkg_Qi") ;; - "dpkg_Ql") ;; - "dpkg_Qo") ;; - "dpkg_Qp") ;; - "dpkg_Qu") ;; - "dpkg_Qs") ;; - "dpkg_Rs") ;; - "dpkg_Rn") ;; - 
"dpkg_Rns") ;; - "dpkg_R") ;; - "dpkg_Si") ;; - "dpkg_Suy") ;; - "dpkg_Su") ;; - "dpkg_Sy") ;; - "dpkg_Ss") ;; - "dpkg_Sc") ;; - "dpkg_Scc") ;; - "dpkg_S") ;; - "dpkg_U") ;; - "dpkg_Sii") ;; - "dpkg_Sccc") ;; - "homebrew_Qi") ;; - "homebrew_Ql") ;; - "homebrew_Qo") ;; - "homebrew_Qc") ;; - "homebrew_Qu") ;; - "homebrew_Qs") ;; - "homebrew_Q") ;; - "homebrew_Rs") ;; - "homebrew_R") ;; - "homebrew_Si") ;; - "homebrew_Suy") ;; - "homebrew_Su") ;; - "homebrew_Sy") ;; - "homebrew_Ss") ;; - "homebrew_Sc") ;; - "homebrew_Scc") ;; - "homebrew_Sccc") ;; - "homebrew_S") ;; - "macports_Ql") ;; - "macports_Qo") ;; - "macports_Qc") ;; - "macports_Qu") ;; - "macports_Rs") ;; - "macports_R") ;; - "macports_Si") ;; - "macports_Suy") ;; - "macports_Su") ;; - "macports_Sy") ;; - "macports_Ss") ;; - "macports_Sc") ;; - "macports_Scc") ;; - "macports_S") ;; - "pkgng_Qi") ;; - "pkgng_Ql") ;; - "pkgng_Qo") ;; - "pkgng_Qp") ;; - "pkgng_Qu") ;; - "pkgng_Q") ;; - "pkgng_Rs") ;; - "pkgng_R") ;; - "pkgng_Si") ;; - "pkgng_Suy") ;; - "pkgng_Su") ;; - "pkgng_Sy") ;; - "pkgng_Ss") ;; - "pkgng_Sc") ;; - "pkgng_Scc") ;; - "pkgng_S") ;; - "pkg_tools_Qi") ;; - "pkg_tools_Ql") ;; - "pkg_tools_Qo") ;; - "pkg_tools_Qp") ;; - "pkg_tools_Qu") ;; - "pkg_tools_Q") ;; - "pkg_tools_Rs") ;; - "pkg_tools_Rn") ;; - "pkg_tools_Rns") ;; - "pkg_tools_R") ;; - "pkg_tools_Si") ;; - "pkg_tools_Sl") ;; - "pkg_tools_Suy") ;; - "pkg_tools_Su") ;; - "pkg_tools_Sy") ;; - "pkg_tools_Ss") ;; - "pkg_tools_Sc") ;; - "pkg_tools_Scc") ;; - "pkg_tools_S") ;; - "portage_Qi") ;; - "portage_Ql") ;; - "portage_Qo") ;; - "portage_Qc") ;; - "portage_Qu") ;; - "portage_Q") ;; - "portage_Rs") ;; - "portage_R") ;; - "portage_Si") ;; - "portage_Suy") ;; - "portage_Su") ;; - "portage_Sy") ;; - "portage_Ss") ;; - "portage_Sc") ;; - "portage_Scc") ;; - "portage_Sccc") ;; - "portage_S") ;; - "sun_tools_Qi") ;; - "sun_tools_Ql") ;; - "sun_tools_Qo") ;; - "sun_tools_Qs") ;; - "sun_tools_Q") ;; - "sun_tools_R") ;; - "sun_tools_U") ;; - 
"swupd_Qk") ;; - "swupd_Qo") ;; - "swupd_Qs") ;; - "swupd_R") ;; - "swupd_Suy") ;; - "swupd_Su") ;; - "swupd_Sy") ;; - "swupd_Ss") ;; - "swupd_S") ;; - "tazpkg_Q") ;; - "tazpkg_Qi") ;; - "tazpkg_Ql") ;; - "tazpkg_Sy") ;; - "tazpkg_Su") ;; - "tazpkg_Suy") ;; - "tazpkg_S") ;; - "tazpkg_R") ;; - "tazpkg_Sc") ;; - "tazpkg_Scc") ;; - "tazpkg_Ss") ;; - "tazpkg_Qo") ;; - "tazpkg_U") ;; - "tlmgr_Qi") ;; - "tlmgr_Qk") ;; - "tlmgr_Ql") ;; - "tlmgr_R") ;; - "tlmgr_S") ;; - "tlmgr_Si") ;; - "tlmgr_Sl") ;; - "tlmgr_Ss") ;; - "tlmgr_Suy") ;; - "tlmgr_U") ;; - "yum_Q") ;; - "yum_Qi") ;; - "yum_Qs") ;; - "yum_Ql") ;; - "yum_Qo") ;; - "yum_Qp") ;; - "yum_Qc") ;; - "yum_Qu") ;; - "yum_Qm") ;; - "yum_Rs") ;; - "yum_R") ;; - "yum_Si") ;; - "yum_Suy") ;; - "yum_Su") ;; - "yum_Sy") ;; - "yum_Ss") ;; - "yum_Sc") ;; - "yum_Scc") ;; - "yum_Sccc") ;; - "yum_S") ;; - "yum_U") ;; - "yum_Sii") ;; - "zypper_Qc") ;; - "zypper_Qi") ;; - "zypper_Ql") ;; - "zypper_Qu") ;; - "zypper_Qm") ;; - "zypper_Qo") ;; - "zypper_Qp") ;; - "zypper_Qs") ;; - "zypper_Q") ;; - "zypper_Rs") ;; - "zypper_R") ;; - "zypper_Rn") ;; - "zypper_Rs") ;; - "zypper_Rns") ;; - "zypper_Suy") ;; - "zypper_Sy") ;; - "zypper_Sl") ;; - "zypper_Ss") ;; - "zypper_Su") ;; - "zypper_Sc") ;; - "zypper_Scc") ;; - "zypper_Sccc") ;; - "zypper_Si") ;; - "zypper_Sii") ;; - "zypper_S") ;; - "zypper_Sw") ;; - "zypper_U") ;; - *) return 1 ;; - esac -} - - - -set -u -unset GREP_OPTIONS - -: "${PACAPT_DEBUG=}" # Show what will be going -: "${GREP:=grep}" # Need to update in, e.g, _sun_tools_init -: "${AWK:=awk}" # Need to update in, e.g, _sun_tools_init - -_sun_tools_init # Dirty tricky patch for SunOS - -export PACAPT_DEBUG GREP AWK - -_POPT="" # primary operation -_SOPT="" # secondary operation -_TOPT="" # options for operations -_EOPT="" # extra options (directly given to package manager) - # these options will be translated by (_translate_all) method. 
-_PACMAN="" # name of the package manager - -_PACMAN_detect \ -|| _die "'pacapt' doesn't support your package manager." - -if [[ -z "$PACAPT_DEBUG" ]]; then - [[ "$_PACMAN" != "pacman" ]] \ - || exec "/usr/bin/pacman" "$@" -elif [[ "$PACAPT_DEBUG" != "auto" ]]; then - _PACMAN="$PACAPT_DEBUG" -fi - -case "${1:-}" in -"update") shift; set -- -Sy "$@" ;; -"upgrade") shift; set -- -Su "$@" ;; -"install") shift; set -- -S "$@" ;; -"search") shift; set -- -Ss "$@" ;; -"remove") shift; set -- -R "$@" ;; -"autoremove") shift; set -- -Rs "$@" ;; -"clean") shift; set -- -Scc "$@" ;; -esac - -while :; do - _args="${1-}" - - [[ "${_args:0:1}" == "-" ]] || break - - case "${_args}" in - "--help") - _help - exit 0 - ;; - - "--noconfirm") - shift - _EOPT="$_EOPT:noconfirm:" - continue - ;; - - "-"|"--") - shift - break - ;; - esac - - i=1 - while [[ "$i" -lt "${#_args}" ]]; do - _opt="${_args:$i:1}" - (( i ++ )) - - case "$_opt" in - h) - _help - exit 0 - ;; - V) - _print_pacapt_version; - exit 0 - ;; - P) - _print_supported_operations "$_PACMAN" - exit 0 - ;; - - Q|S|R|U) - if [[ -n "$_POPT" && "$_POPT" != "$_opt" ]]; then - _error "Only one operation may be used at a time" - exit 1 - fi - _POPT="$_opt" - ;; - - # Comment 2015 May 26th: This part deals with the 2nd option. - # Most of the time, there is only one 2nd option. But some - # operation may need extra and/or duplicate (e.g, Sy <> Syy). - # - # See also - # - # * https://github.com/icy/pacapt/issues/13 - # - # This implementation works, but with a bug. #Rsn works - # but #Rns is translated to #Rn (incorrectly.) - # Thanks Huy-Ngo for this nice catch. - # - # FIXME: Please check pacman(8) to see if they are really 2nd operation - # - e|g|i|l|m|n|o|p|s) - if [[ "$_SOPT" == '' ]]; then - _SOPT="$_opt" - continue - fi - - # Understand it: - # If there is already an option recorded, the incoming option - # will come and compare itself with known one. - # We have a table - # - # known one vs. incoming ? 
| result - # < | one-new - # = | one-one - # > | new-one - # - # Let's say, after this step, the 3rd option comes (named X), - # and the current result is "a-b". We have a table - # - # a(b) vs. X | result - # < | aX (b dropped) - # = | aa (b dropped) - # > | Xa (b dropped) - # - # In any case, the first one matters. - # - if [[ "${_SOPT:0:1}" < "$_opt" ]]; then - _SOPT="${_SOPT:0:1}$_opt" - elif [[ "${_SOPT:0:1}" == "$_opt" ]]; then - _SOPT="$_opt$_opt" - else - _SOPT="$_opt${_SOPT:0:1}" - fi - - ;; - - q) - _TOPT="$_opt" ;; # Thanks to James Pearson - - u) - if [[ "${_SOPT:0:1}" == "y" ]]; then - _SOPT="uy" - else - _SOPT="u" - fi - ;; - - y) - if [[ "${_SOPT:0:1}" == "u" ]]; then - _SOPT="uy" - else - _SOPT="y" - fi - ;; - - c) - if [[ "${_SOPT:0:2}" == "cc" ]]; then - _SOPT="ccc" - elif [[ "${_SOPT:0:1}" == "c" ]]; then - _SOPT="cc" - else - _SOPT="$_opt" - fi - ;; - - w|v) - _EOPT="$_EOPT:$_opt:" - ;; - - *) - # FIXME: If option is unknown, we will break the loop - # FIXME: and this option will be used by the native program. - # FIXME: break 2 - _die "pacapt: Unknown option '$_opt'." - ;; - esac - done - - shift - - # If the primary option and the secondary are known - # we would break the argument detection, but for sure we will look - # forward to see there is anything interesting... - if [[ -n "$_POPT" && -n "$_SOPT" ]]; then - case "${1:-}" in - "-w"|"--noconfirm") ;; - *) break;; - esac - - # Don't have anything from the **first** argument. Something wrong. - # FIXME: This means that user must enter at least primary action - # FIXME: or secondary action in the very first part... 
- elif [[ -z "${_POPT}${_SOPT}${_TOPT}" ]]; then - break - fi -done - -[[ -n "$_POPT" ]] \ -|| _die "Usage: pacapt # -h for help, -P list supported functions" - -_validate_operation "${_PACMAN}_${_POPT}${_SOPT}" \ -|| { - _not_implemented - exit 1 -} - -_translate_all || exit - -if [[ -n "$*" ]]; then - case "${_POPT}${_SOPT}" in - "Su"|"Sy"|"Suy") - echo 1>&2 "WARNING ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" - echo 1>&2 " The -Sy/u options refresh and/or upgrade all packages." - echo 1>&2 " To install packages as well, use separate commands:" - echo 1>&2 - echo 1>&2 " $0 -S$_SOPT; $0 -S ${*}" - echo 1>&2 "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" - esac -fi - -if [[ -n "$PACAPT_DEBUG" ]]; then - echo "pacapt: $_PACMAN, p=$_POPT, s=$_SOPT, t=$_TOPT, e=$_EOPT" - echo "pacapt: execute '${_PACMAN}_${_POPT}${_SOPT} $_EOPT ${*}'" - declare -f "${_PACMAN}_${_POPT}${_SOPT}" -else - "_${_PACMAN}_init" || exit - "${_PACMAN}_${_POPT}${_SOPT}" $_EOPT "$@" -fi From c724b0432af4cc37bff83f07cfee7f3ab712ea3a Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Tue, 8 Oct 2019 17:19:44 +1100 Subject: [PATCH 31/93] DCD-686: Initial implementation of DB restore from dump. 
--- roles/database_init/tasks/main.yml | 85 ++++++++++++++++++------------ 1 file changed, 51 insertions(+), 34 deletions(-) diff --git a/roles/database_init/tasks/main.yml b/roles/database_init/tasks/main.yml index 99638f7..003f7c8 100644 --- a/roles/database_init/tasks/main.yml +++ b/roles/database_init/tasks/main.yml @@ -1,40 +1,57 @@ --- -- name: Create application DB user - postgresql_user: - login_host: "{{ atl_db_host }}" - login_user: "{{ atl_db_root_user }}" - login_password: "{{ atl_db_root_password }}" - port: "{{ atl_db_port }}" - name: "{{ atl_jdbc_user }}" - password: "{{ atl_jdbc_password }}" - expires: 'infinity' - tags: - - new_only +- block: -- name: Update root privs for new user - postgresql_privs: - login_host: "{{ atl_db_host }}" - login_user: "{{ atl_db_root_user }}" - login_password: "{{ atl_db_root_password }}" - database: postgres - roles: "{{ atl_db_root_user }}" - objs: "{{ atl_jdbc_user }}" - type: group - tags: - - new_only + - name: Create application DB user + postgresql_user: + login_host: "{{ atl_db_host }}" + login_user: "{{ atl_db_root_user }}" + login_password: "{{ atl_db_root_password }}" + port: "{{ atl_db_port }}" + name: "{{ atl_jdbc_user }}" + password: "{{ atl_jdbc_password }}" + expires: 'infinity' + + - name: Update root privs for new user + postgresql_privs: + login_host: "{{ atl_db_host }}" + login_user: "{{ atl_db_root_user }}" + login_password: "{{ atl_db_root_password }}" + database: postgres + roles: "{{ atl_db_root_user }}" + objs: "{{ atl_jdbc_user }}" + type: group + + - name: Create new application database + postgresql_db: + login_host: "{{ atl_db_host }}" + login_user: "{{ atl_db_root_user }}" + login_password: "{{ atl_db_root_password }}" + port: "{{ atl_db_port }}" + name: "{{ atl_jdbc_db_name }}" + owner: "{{ atl_jdbc_user }}" + encoding: "{{ atl_jdbc_encoding }}" + lc_collate: "{{ atl_jdbc_collation }}" + lc_ctype: "{{ atl_jdbc_ctype }}" + template: "{{ atl_jdbc_template }}" + when: atl_backup_db_dest is 
not defined + + - name: Restore application database + postgresql_db: + login_host: "{{ atl_db_host }}" + login_user: "{{ atl_db_root_user }}" + login_password: "{{ atl_db_root_password }}" + port: "{{ atl_db_port }}" + name: "{{ atl_jdbc_db_name }}" + owner: "{{ atl_jdbc_user }}" + encoding: "{{ atl_jdbc_encoding }}" + lc_collate: "{{ atl_jdbc_collation }}" + lc_ctype: "{{ atl_jdbc_ctype }}" + template: "{{ atl_jdbc_template }}" + # Depends on fetch_backup roles + state: restore + target: "{{ atl_backup_db_dest }}" + when: atl_backup_db_dest is defined -- name: Create application database - postgresql_db: - login_host: "{{ atl_db_host }}" - login_user: "{{ atl_db_root_user }}" - login_password: "{{ atl_db_root_password }}" - port: "{{ atl_db_port }}" - name: "{{ atl_jdbc_db_name }}" - owner: "{{ atl_jdbc_user }}" - encoding: "{{ atl_jdbc_encoding }}" - lc_collate: "{{ atl_jdbc_collation }}" - lc_ctype: "{{ atl_jdbc_ctype }}" - template: "{{ atl_jdbc_template }}" tags: - new_only From 32d3640bbb90412df5eebda7aa00ac8045682709 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Wed, 9 Oct 2019 12:43:41 +1100 Subject: [PATCH 32/93] DCD-686: Add fetching of backups to Jira for testing. --- aws_jira_dc_node.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/aws_jira_dc_node.yml b/aws_jira_dc_node.yml index 41b7be6..6ef0765 100644 --- a/aws_jira_dc_node.yml +++ b/aws_jira_dc_node.yml @@ -21,6 +21,7 @@ - role: linux_common - role: aws_common - role: aws_shared_fs_config + - role: fetch_backups - role: product_common - role: product_install - role: database_init From cb691d25562630d088bd193edb6efee743ab1c76 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Wed, 9 Oct 2019 13:14:04 +1100 Subject: [PATCH 33/93] DCD-686: We still need some python2 packages. 
--- bin/install-ansible | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/bin/install-ansible b/bin/install-ansible index b515cdf..196562e 100755 --- a/bin/install-ansible +++ b/bin/install-ansible @@ -6,8 +6,12 @@ source /etc/os-release if [[ $ID = "amzn" ]]; then yum install -y \ python3-devel \ - python3-pip + python3-pip \ + python2-boto3 \ + python2-botocore + else + # FIXME: Currently assumes Debian-based apt-get update && \ apt-get install -y \ python3-dev \ From 5996a176d5f54f9b221208582848b2331fd3729d Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Wed, 9 Oct 2019 13:25:32 +1100 Subject: [PATCH 34/93] DCD-686: We need postgres installed for client utils. --- bin/install-ansible | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bin/install-ansible b/bin/install-ansible index 196562e..98189a5 100755 --- a/bin/install-ansible +++ b/bin/install-ansible @@ -4,7 +4,9 @@ set -e source /etc/os-release if [[ $ID = "amzn" ]]; then + amazon-linux-extras enable postgresql9.6 yum install -y \ + postgresql \ python3-devel \ python3-pip \ python2-boto3 \ From adf9c270932fd6df095c6f3d762356cf901fe262 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Wed, 9 Oct 2019 13:44:02 +1100 Subject: [PATCH 35/93] DCD-686: Ignore any roles specified in the DB dump during restore. --- roles/database_init/tasks/main.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/roles/database_init/tasks/main.yml b/roles/database_init/tasks/main.yml index 003f7c8..ec5ce38 100644 --- a/roles/database_init/tasks/main.yml +++ b/roles/database_init/tasks/main.yml @@ -51,6 +51,7 @@ # Depends on fetch_backup roles state: restore target: "{{ atl_backup_db_dest }}" + target_opts: "--no-owner --role={{ atl_jdbc_user }}" when: atl_backup_db_dest is defined tags: From 9d8b47ba9ea7f8c5f2aa82352b53138c528efda0 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Wed, 9 Oct 2019 13:48:50 +1100 Subject: [PATCH 36/93] DCD-686: Only download backups if changed. 
--- roles/fetch_backups/tasks/main.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/roles/fetch_backups/tasks/main.yml b/roles/fetch_backups/tasks/main.yml index 45d7e1f..16b18b8 100644 --- a/roles/fetch_backups/tasks/main.yml +++ b/roles/fetch_backups/tasks/main.yml @@ -28,6 +28,7 @@ - name: Fetch the manifest from S3 aws_s3: mode: get + overwrite: different bucket: "{{ atl_backup_manifest_bucket }}" object: "{{ atl_backup_manifest_path }}" dest: "{{ atl_backup_manifest_dest }}" @@ -55,6 +56,7 @@ - name: Fetch DB backup from S3 aws_s3: mode: get + overwrite: different bucket: "{{ atl_backup_manifest.db_dump | urlsplit('hostname') }}" object: "{{ atl_backup_manifest.db_dump | urlsplit('path') }}" dest: "{{ atl_backup_db_dest }}" @@ -62,6 +64,7 @@ - name: Fetch Home backup from S3 aws_s3: mode: get + overwrite: different bucket: "{{ atl_backup_manifest.shared_home_dump | urlsplit('hostname') }}" object: "{{ atl_backup_manifest.shared_home_dump | urlsplit('path') }}" dest: "{{ atl_backup_home_dest }}" From 0400d8943d557f9f366dedde768d2e4afc47c275 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Wed, 9 Oct 2019 15:29:58 +1100 Subject: [PATCH 37/93] DCD-686: Only restore DB when it is a new one. 
--- roles/database_init/tasks/main.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/roles/database_init/tasks/main.yml b/roles/database_init/tasks/main.yml index ec5ce38..aa8da99 100644 --- a/roles/database_init/tasks/main.yml +++ b/roles/database_init/tasks/main.yml @@ -34,7 +34,7 @@ lc_collate: "{{ atl_jdbc_collation }}" lc_ctype: "{{ atl_jdbc_ctype }}" template: "{{ atl_jdbc_template }}" - when: atl_backup_db_dest is not defined + register: db_create - name: Restore application database postgresql_db: @@ -51,8 +51,7 @@ # Depends on fetch_backup roles state: restore target: "{{ atl_backup_db_dest }}" - target_opts: "--no-owner --role={{ atl_jdbc_user }}" - when: atl_backup_db_dest is defined + when: db_create.changed and atl_backup_db_dest is defined tags: - new_only From 72659de9ad8de6238eaaefa2d65c1cb1cc7ba564 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Thu, 10 Oct 2019 09:30:45 +1100 Subject: [PATCH 38/93] DCD-686: Install Postgres only if restoration is required. --- bin/install-ansible | 2 -- roles/fetch_backups/tasks/amazon.yml | 7 +++++++ roles/fetch_backups/tasks/main.yml | 3 +++ 3 files changed, 10 insertions(+), 2 deletions(-) create mode 100644 roles/fetch_backups/tasks/amazon.yml diff --git a/bin/install-ansible b/bin/install-ansible index 98189a5..196562e 100755 --- a/bin/install-ansible +++ b/bin/install-ansible @@ -4,9 +4,7 @@ set -e source /etc/os-release if [[ $ID = "amzn" ]]; then - amazon-linux-extras enable postgresql9.6 yum install -y \ - postgresql \ python3-devel \ python3-pip \ python2-boto3 \ diff --git a/roles/fetch_backups/tasks/amazon.yml b/roles/fetch_backups/tasks/amazon.yml new file mode 100644 index 0000000..bf32125 --- /dev/null +++ b/roles/fetch_backups/tasks/amazon.yml @@ -0,0 +1,7 @@ +--- + +# Amazon Linux 2 supplies extra packages via a special command. 
+- name: Enable Postgresql from 'extras' + command: amazon-linux-extras install -y "postgresql{{ postgres_version }}" + args: + creates: /usr/bin/psql diff --git a/roles/fetch_backups/tasks/main.yml b/roles/fetch_backups/tasks/main.yml index 16b18b8..27c5efe 100644 --- a/roles/fetch_backups/tasks/main.yml +++ b/roles/fetch_backups/tasks/main.yml @@ -69,4 +69,7 @@ object: "{{ atl_backup_manifest.shared_home_dump | urlsplit('path') }}" dest: "{{ atl_backup_home_dest }}" + - name: Install distro-specific restore support packages + include_tasks: "{{ ansible_distribution|lower }}.yml" + when: atl_backup_manifest_url is defined and atl_backup_manifest_url != '' From dddf3a86ec4320dd432778cf76a47f1ccca137db Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Thu, 10 Oct 2019 09:39:50 +1100 Subject: [PATCH 39/93] DCD-686: Add comment about manifest format. --- roles/fetch_backups/tasks/main.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/roles/fetch_backups/tasks/main.yml b/roles/fetch_backups/tasks/main.yml index 27c5efe..a5a87de 100644 --- a/roles/fetch_backups/tasks/main.yml +++ b/roles/fetch_backups/tasks/main.yml @@ -40,6 +40,8 @@ dest: "{{ atl_backup_manifest_dest }}" when: atl_backup_manifest_url.scheme != 's3' + # FIXME: The manifest format is still undecided; everything + # referencing this variable should be considered a placeholder. - name: Load parameters from manifest include_vars: file: "{{ atl_backup_manifest_dest }}" From 6f56925fa12c8646e733574a32e37e1eaaafd9a3 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Thu, 10 Oct 2019 11:05:20 +1100 Subject: [PATCH 40/93] DCD-686: Move restore operations into the fetch role for the time being. 
--- roles/database_init/tasks/main.yml | 19 +-------- roles/fetch_backups/defaults/main.yml | 4 ++ roles/fetch_backups/tasks/main.yml | 56 ++++++++++++++++++++++++++- 3 files changed, 59 insertions(+), 20 deletions(-) create mode 100644 roles/fetch_backups/defaults/main.yml diff --git a/roles/database_init/tasks/main.yml b/roles/database_init/tasks/main.yml index aa8da99..8827f99 100644 --- a/roles/database_init/tasks/main.yml +++ b/roles/database_init/tasks/main.yml @@ -34,24 +34,7 @@ lc_collate: "{{ atl_jdbc_collation }}" lc_ctype: "{{ atl_jdbc_ctype }}" template: "{{ atl_jdbc_template }}" - register: db_create - - - name: Restore application database - postgresql_db: - login_host: "{{ atl_db_host }}" - login_user: "{{ atl_db_root_user }}" - login_password: "{{ atl_db_root_password }}" - port: "{{ atl_db_port }}" - name: "{{ atl_jdbc_db_name }}" - owner: "{{ atl_jdbc_user }}" - encoding: "{{ atl_jdbc_encoding }}" - lc_collate: "{{ atl_jdbc_collation }}" - lc_ctype: "{{ atl_jdbc_ctype }}" - template: "{{ atl_jdbc_template }}" - # Depends on fetch_backup roles - state: restore - target: "{{ atl_backup_db_dest }}" - when: db_create.changed and atl_backup_db_dest is defined + register: db_created tags: - new_only diff --git a/roles/fetch_backups/defaults/main.yml b/roles/fetch_backups/defaults/main.yml new file mode 100644 index 0000000..6561c7e --- /dev/null +++ b/roles/fetch_backups/defaults/main.yml @@ -0,0 +1,4 @@ +--- + +atl_backup_home_restore_canary_filename: ".slingshot_home_restore" +atl_backup_home_restore_canary_path: "{{ atl_product_home_shared }}/{{ atl_backup_home_restore_canary_filename }}" diff --git a/roles/fetch_backups/tasks/main.yml b/roles/fetch_backups/tasks/main.yml index a5a87de..5dcdf01 100644 --- a/roles/fetch_backups/tasks/main.yml +++ b/roles/fetch_backups/tasks/main.yml @@ -3,6 +3,15 @@ # This role will attempt to fetch and load the backup manifest from a # remote HTTP or S3 URL. 
On successful completion the contents of JSON # or YAML document will be in the var `atl_backup_manifest`. +# +# PREREQUISITES: +# * `atl_backup_manifest_url` points at the manifest. +# * The shared home filesystem is mounted if necessary (e.g. NFS/EFS). +# * The database has been created and the variable `db_created` is +# registered with the result (i.e: `register: db_created`). +# +# NOTE: The actual DB/FS restore operations could potentially be split +# out into discrete roles, but currently that is not required. - block: @@ -40,8 +49,6 @@ dest: "{{ atl_backup_manifest_dest }}" when: atl_backup_manifest_url.scheme != 's3' - # FIXME: The manifest format is still undecided; everything - # referencing this variable should be considered a placeholder. - name: Load parameters from manifest include_vars: file: "{{ atl_backup_manifest_dest }}" @@ -49,6 +56,9 @@ - name: Define the DB and home dump destinations set_fact: + # FIXME: The manifest format is still undecided so the + # following usages will need to be updated once it settles.. 
+ atl_backup_id: "{{ atl_backup_manifest.name }}" + atl_backup_db_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.db_dump | basename }}" + atl_backup_home_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.shared_home_dump | basename }}" @@ -74,4 +84,46 @@ - name: Install distro-specific restore support packages include_tasks: "{{ ansible_distribution|lower }}.yml" + + - name: Restore application database + postgresql_db: + login_host: "{{ atl_db_host }}" + login_user: "{{ atl_db_root_user }}" + login_password: "{{ atl_db_root_password }}" + port: "{{ atl_db_port }}" + name: "{{ atl_jdbc_db_name }}" + owner: "{{ atl_jdbc_user }}" + encoding: "{{ atl_jdbc_encoding }}" + lc_collate: "{{ atl_jdbc_collation }}" + lc_ctype: "{{ atl_jdbc_ctype }}" + template: "{{ atl_jdbc_template }}" + # Depends on fetch_backup roles + state: restore + target: "{{ atl_backup_db_dest }}" + when: db_created.changed and atl_backup_db_dest is defined + + + - name: Check for the restore canary file + stat: + path: "{{ atl_backup_home_restore_canary_path }}" + register: restore_canary + + - name: Create shared home if necessary + file: + path: "{{ atl_product_home_shared }}" + state: directory + mode: 0750 + owner: "{{ atl_product_user }}" + group: "{{ atl_product_user }}" + when: restore_canary.stat.exists + + - name: Restore the shared-home backup + unarchive: + path: "{{ atl_backup_home_restore_canary_path }}" + dest: "{{ atl_product_home_shared }}" + owner: "{{ atl_product_user }}" + group: "{{ atl_product_user }}" + when: restore_canary.stat.exists + + when: atl_backup_manifest_url is defined and atl_backup_manifest_url != '' From 93c359d0295b84854c8a4124d5578fde829cd916 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Thu, 10 Oct 2019 11:12:18 +1100 Subject: [PATCH 41/93] DCD-686: Rename role to reflect updated functionality and expand restore functionality.
--- aws_jira_dc_node.yml | 2 +- .../defaults/main.yml | 0 .../tasks/amazon.yml | 0 .../tasks/main.yml | 37 +++++++++++-------- 4 files changed, 23 insertions(+), 16 deletions(-) rename roles/{fetch_backups => restore_backups}/defaults/main.yml (100%) rename roles/{fetch_backups => restore_backups}/tasks/amazon.yml (100%) rename roles/{fetch_backups => restore_backups}/tasks/main.yml (84%) diff --git a/aws_jira_dc_node.yml b/aws_jira_dc_node.yml index 6ef0765..b0470fb 100644 --- a/aws_jira_dc_node.yml +++ b/aws_jira_dc_node.yml @@ -21,9 +21,9 @@ - role: linux_common - role: aws_common - role: aws_shared_fs_config - - role: fetch_backups - role: product_common - role: product_install - role: database_init + - role: restore_backups - role: jira_config - role: product_startup diff --git a/roles/fetch_backups/defaults/main.yml b/roles/restore_backups/defaults/main.yml similarity index 100% rename from roles/fetch_backups/defaults/main.yml rename to roles/restore_backups/defaults/main.yml diff --git a/roles/fetch_backups/tasks/amazon.yml b/roles/restore_backups/tasks/amazon.yml similarity index 100% rename from roles/fetch_backups/tasks/amazon.yml rename to roles/restore_backups/tasks/amazon.yml diff --git a/roles/fetch_backups/tasks/main.yml b/roles/restore_backups/tasks/main.yml similarity index 84% rename from roles/fetch_backups/tasks/main.yml rename to roles/restore_backups/tasks/main.yml index 5dcdf01..a0023cf 100644 --- a/roles/fetch_backups/tasks/main.yml +++ b/roles/restore_backups/tasks/main.yml @@ -108,22 +108,29 @@ path: "{{ atl_backup_home_restore_canary_path }}" register: restore_canary - - name: Create shared home if necessary - file: - path: "{{ atl_product_home_shared }}" - state: directory - mode: 0750 - owner: "{{ atl_product_user }}" - group: "{{ atl_product_user }}" - when: restore_canary.stat.exists + - block: - - name: Restore the shared-home backup - unarchive: - path: "{{ atl_backup_home_restore_canary_path }}" - dest: "{{ atl_product_home_shared 
}}" - owner: "{{ atl_product_user }}" - group: "{{ atl_product_user }}" - when: restore_canary.stat.exists + - name: Create shared home if necessary + file: + path: "{{ atl_product_home_shared }}" + state: directory + mode: 0750 + owner: "{{ atl_product_user }}" + group: "{{ atl_product_user }}" + + - name: Restore the shared-home backup + unarchive: + path: "{{ atl_backup_home_restore_canary_path }}" + dest: "{{ atl_product_home_shared }}" + owner: "{{ atl_product_user }}" + group: "{{ atl_product_user }}" + + - name: Create restore-canary if necessary + copy: + dest: "{{ atl_backup_home_restore_canary_path }}" + content: "{{ atl_backup_id }}" + + when: not restore_canary.stat.exists when: atl_backup_manifest_url is defined and atl_backup_manifest_url != '' From 8a343f7e8c66e4b4fd5b3b519495b76108c716d2 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Thu, 10 Oct 2019 11:26:47 +1100 Subject: [PATCH 42/93] DCD-686: Fix unarchive operation. --- roles/restore_backups/tasks/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/restore_backups/tasks/main.yml b/roles/restore_backups/tasks/main.yml index a0023cf..019c5d5 100644 --- a/roles/restore_backups/tasks/main.yml +++ b/roles/restore_backups/tasks/main.yml @@ -120,7 +120,7 @@ - name: Restore the shared-home backup unarchive: - path: "{{ atl_backup_home_restore_canary_path }}" + src: "{{ atl_backup_home_restore_canary_path }}" dest: "{{ atl_product_home_shared }}" owner: "{{ atl_product_user }}" group: "{{ atl_product_user }}" From 8254482761e5b73b1b420fd91c59495cf639a95d Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Thu, 10 Oct 2019 11:28:56 +1100 Subject: [PATCH 43/93] DCD-686: Fix tarball location. 
--- roles/restore_backups/tasks/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/restore_backups/tasks/main.yml b/roles/restore_backups/tasks/main.yml index 019c5d5..f53b685 100644 --- a/roles/restore_backups/tasks/main.yml +++ b/roles/restore_backups/tasks/main.yml @@ -120,7 +120,7 @@ - name: Restore the shared-home backup unarchive: - src: "{{ atl_backup_home_restore_canary_path }}" + src: "{{ atl_backup_home_dest }}" dest: "{{ atl_product_home_shared }}" owner: "{{ atl_product_user }}" group: "{{ atl_product_user }}" From 46a2bad4b8ce01ead1c4ba77f1532836fc6288e8 Mon Sep 17 00:00:00 2001 From: Varun Arbatti <1063972+theghostwhoforks@users.noreply.github.com> Date: Thu, 10 Oct 2019 14:51:06 +1100 Subject: [PATCH 44/93] DCD-742: Updates manifest structure in ansible --- roles/restore_backups/tasks/main.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/roles/restore_backups/tasks/main.yml b/roles/restore_backups/tasks/main.yml index f53b685..d12d073 100644 --- a/roles/restore_backups/tasks/main.yml +++ b/roles/restore_backups/tasks/main.yml @@ -59,8 +59,8 @@ # FIXME: The manifest format is still undecided so the # following usages will need to be updated once it settles.. atl_backup_id: "{{ atl_backup_manifest.name }}" - atl_backup_db_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.db_dump | basename }}" - atl_backup_home_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.shared_home_dump | basename }}" + atl_backup_db_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.artifacts.db.location.value | basename }}" + atl_backup_home_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.artifacts.shared_home.location.value | basename }}" # FIXME: Here we fetch the backups. 
However we may wish to stream # these directly from S3 to the target DB/FS to avoid requiring @@ -69,16 +69,16 @@ aws_s3: mode: get overwrite: different - bucket: "{{ atl_backup_manifest.db_dump | urlsplit('hostname') }}" - object: "{{ atl_backup_manifest.db_dump | urlsplit('path') }}" + bucket: "{{ atl_backup_manifest.artifacts.db.location.value | urlsplit('hostname') }}" + object: "{{ atl_backup_manifest.artifacts.db.location.value | urlsplit('path') }}" dest: "{{ atl_backup_db_dest }}" - name: Fetch Home backup from S3 aws_s3: mode: get overwrite: different - bucket: "{{ atl_backup_manifest.shared_home_dump | urlsplit('hostname') }}" - object: "{{ atl_backup_manifest.shared_home_dump | urlsplit('path') }}" + bucket: "{{ atl_backup_manifest.artifacts.shared_home.location.value | urlsplit('hostname') }}" + object: "{{ atl_backup_manifest.artifacts.shared_home.location.value | urlsplit('path') }}" dest: "{{ atl_backup_home_dest }}" - name: Install distro-specific restore support packages From 054e171da3c8c925534d979cbe60bfebafd5f589 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Thu, 10 Oct 2019 16:12:06 +1100 Subject: [PATCH 45/93] ITOPSENG-164 Save the binary to shared home adding a locking mechanism to avoid potential race condition. 
This also allows downloading of old binaries once no longer available to download --- roles/product_install/defaults/main.yml | 4 + roles/product_install/tasks/main.yml | 79 ++++++++++++++++--- .../tasks/unpack_installer.yml | 2 +- 3 files changed, 74 insertions(+), 11 deletions(-) diff --git a/roles/product_install/defaults/main.yml b/roles/product_install/defaults/main.yml index 211e76e..0741a94 100644 --- a/roles/product_install/defaults/main.yml +++ b/roles/product_install/defaults/main.yml @@ -22,6 +22,10 @@ atl_product_download_filename: "{{ atl_download_edition | default(atl_product_ed atl_product_download: "{{ atl_installer_temp }}/{{ atl_product_download_filename }}" atl_product_varfile: "{{ atl_installer_temp }}/{{ atl_product_family }}.varfile" +atl_product_home_shared_download_dir: "{{ atl_product_home_shared }}/downloads" +atl_product_home_shared_download: "{{ atl_product_home_shared_download_dir }}/{{ atl_product_download_filename }}" +atl_product_home_shared_download_lockdir: "{{ atl_product_home_shared_download }}_downloaded" + atl_marketplace_base: "https://marketplace.atlassian.com" atl_servicedesk_latest_url: "https://marketplace.atlassian.com/rest/2/products/key/jira-servicedesk/versions/latest" atl_servicedesk_versioned_url: "https://marketplace.atlassian.com/rest/2/products/key/jira-servicedesk/versions/name/{{ atl_product_version }}" diff --git a/roles/product_install/tasks/main.yml b/roles/product_install/tasks/main.yml index 77371fa..94cd6ba 100644 --- a/roles/product_install/tasks/main.yml +++ b/roles/product_install/tasks/main.yml @@ -116,6 +116,7 @@ - "{{ atl_product_home }}" - "{{ atl_product_installation_versioned }}" - "{{ atl_product_version_cache_dir }}" + - "{{ atl_product_home_shared_download_dir }}" changed_when: false # For Molecule idempotence check # At this point atl_product_version should be set, cache if necessary. 
@@ -125,17 +126,75 @@ dest: "{{ atl_product_version_cache }}" force: true +# For the first run a temp binary should be downloaded but moved to shared home to ensure all subsequent nodes have access +# to the same specific version binary. +# To prevent a race condition with multiple downloads at the same time a directory is used as a lockfile (atomic operation). -# Note: We don't the cache binary in the shared drive to the complexity -# around download race-conditions if multiple nodes are starting at -# the same time. When downloading from product-downloads.atlassian.com -# (which is a CDN) takes seconds anyway. -- name: Fetch product installer - get_url: - url: "{{ atl_product_download_url }}" - dest: "{{ atl_product_download }}" - mode: 0755 - force: false +- name: Assume temp binary should be downloaded + set_fact: + download_binary: yes + +# Check for product installer in home_shared and lockdir to determine if it needs to be downloaded again. +- name: Check download lock directory exists + stat: + path: "{{ atl_product_home_shared_download_lockdir }}" + register: download_lockdir + +- name: Check for presence of product installer in home_shared + stat: + path: "{{ atl_product_home_shared_download }}" + register: home_shared_downloaded + +# If binary exists and lockdir exists use this binary instead +- name: Check Lock Directory and binary exists on shared_home + set_fact: + download_binary: no + when: + - home_shared_downloaded.stat.exists + - download_lockdir.stat.isdir is defined + - download_lockdir.stat.isdir + +# If the binary was never installed, download it +- name: "Download product installer and move to shared directory" + block: + + - name: Fetch product installer + get_url: + url: "{{ atl_product_download_url }}" + dest: "{{ atl_product_download }}" + mode: 0755 + force: false + register: atl_product_downloaded + + - name: Remove lockdir to prevent nodes relying on binary when copying + file: + path: "{{ atl_product_home_shared_download_lockdir }}" 
+ state: absent + when: atl_product_downloaded is succeeded + register: lockdir_removed + + - name: Copy temp installer to home_shared + copy: + src: "{{ atl_product_download }}" + dest: "{{ atl_product_home_shared_download }}" + remote_src: yes + when: lockdir_removed is succeeded + register: copied + + - name: Delete old temp installer + file: + path: "{{ atl_product_download }}" + state: absent + when: copied is succeeded + register: temp_deleted + + - name: Create lockdir once product installer downloaded and moved + file: + path: "{{ atl_product_home_shared_download_lockdir }}" + state: directory + when: temp_deleted is succeeded + + when: download_binary - name: Unpack the downloaded application depending on format include_tasks: "unpack_{{ atl_download_format }}.yml" diff --git a/roles/product_install/tasks/unpack_installer.yml b/roles/product_install/tasks/unpack_installer.yml index 925dca0..11d37e3 100644 --- a/roles/product_install/tasks/unpack_installer.yml +++ b/roles/product_install/tasks/unpack_installer.yml @@ -11,7 +11,7 @@ # will create 'jira1'; this potentially creates idempotency/upgrade # issues down the line. 
- name: Run the installer - command: /bin/sh "{{ atl_product_download }}" -q -varfile "{{ atl_product_varfile }}" + command: /bin/sh "{{ atl_product_home_shared_download }}" -q -varfile "{{ atl_product_varfile }}" args: creates: "{{ atl_product_installation_versioned }}/.install4j/" become: true From 3ed48340b3553a811fdc948c467e902f3a3c1e1a Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Thu, 10 Oct 2019 16:52:51 +1100 Subject: [PATCH 46/93] ITOPSENG-164 Yamllint fix --- roles/product_install/tasks/main.yml | 81 ++++++++++++++-------------- 1 file changed, 42 insertions(+), 39 deletions(-) diff --git a/roles/product_install/tasks/main.yml b/roles/product_install/tasks/main.yml index 94cd6ba..8a90eb3 100644 --- a/roles/product_install/tasks/main.yml +++ b/roles/product_install/tasks/main.yml @@ -126,15 +126,18 @@ dest: "{{ atl_product_version_cache }}" force: true -# For the first run a temp binary should be downloaded but moved to shared home to ensure all subsequent nodes have access +# For the first run a temp binary should be downloaded but moved to +# shared home to ensure all subsequent nodes have access # to the same specific version binary. -# To prevent a race condition with multiple downloads at the same time a directory is used as a lockfile (atomic operation). +# To prevent a race condition with multiple downloads at the same time +# a directory is used as a lockfile (atomic operation). - name: Assume temp binary should be downloaded set_fact: - download_binary: yes + download_binary: true -# Check for product installer in home_shared and lockdir to determine if it needs to be downloaded again. +# Check for product installer in home_shared and lockdir to determine +# if it needs to be downloaded again. 
- name: Check download lock directory exists stat: path: "{{ atl_product_home_shared_download_lockdir }}" @@ -148,51 +151,51 @@ # If binary exists and lockdir exists use this binary instead - name: Check Lock Directory and binary exists on shared_home set_fact: - download_binary: no + download_binary: false when: - - home_shared_downloaded.stat.exists - - download_lockdir.stat.isdir is defined - - download_lockdir.stat.isdir + - home_shared_downloaded.stat.exists + - download_lockdir.stat.isdir is defined + - download_lockdir.stat.isdir # If the binary was never installed, download it - name: "Download product installer and move to shared directory" block: - - name: Fetch product installer - get_url: - url: "{{ atl_product_download_url }}" - dest: "{{ atl_product_download }}" - mode: 0755 - force: false - register: atl_product_downloaded + - name: Fetch product installer + get_url: + url: "{{ atl_product_download_url }}" + dest: "{{ atl_product_download }}" + mode: 0755 + force: false + register: atl_product_downloaded - - name: Remove lockdir to prevent nodes relying on binary when copying - file: - path: "{{ atl_product_home_shared_download_lockdir }}" - state: absent - when: atl_product_downloaded is succeeded - register: lockdir_removed + - name: Remove lockdir to prevent nodes relying on binary when copying + file: + path: "{{ atl_product_home_shared_download_lockdir }}" + state: absent + when: atl_product_downloaded is succeeded + register: lockdir_removed - - name: Copy temp installer to home_shared - copy: - src: "{{ atl_product_download }}" - dest: "{{ atl_product_home_shared_download }}" - remote_src: yes - when: lockdir_removed is succeeded - register: copied + - name: Copy temp installer to home_shared + copy: + src: "{{ atl_product_download }}" + dest: "{{ atl_product_home_shared_download }}" + remote_src: true + when: lockdir_removed is succeeded + register: copied - - name: Delete old temp installer - file: - path: "{{ atl_product_download }}" - 
state: absent - when: copied is succeeded - register: temp_deleted + - name: Delete old temp installer + file: + path: "{{ atl_product_download }}" + state: absent + when: copied is succeeded + register: temp_deleted - - name: Create lockdir once product installer downloaded and moved - file: - path: "{{ atl_product_home_shared_download_lockdir }}" - state: directory - when: temp_deleted is succeeded + - name: Create lockdir once product installer downloaded and moved + file: + path: "{{ atl_product_home_shared_download_lockdir }}" + state: directory + when: temp_deleted is succeeded when: download_binary From b3dffa684f112196438ed29830556ce2d573c47b Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Fri, 11 Oct 2019 14:20:21 +1100 Subject: [PATCH 47/93] ITOPSENG-164 Further testing required for logic change --- roles/product_install/defaults/main.yml | 5 +- roles/product_install/tasks/main.yml | 136 +++++++++++------- .../tasks/unpack_installer.yml | 4 +- 3 files changed, 93 insertions(+), 52 deletions(-) diff --git a/roles/product_install/defaults/main.yml b/roles/product_install/defaults/main.yml index 0741a94..8798ac7 100644 --- a/roles/product_install/defaults/main.yml +++ b/roles/product_install/defaults/main.yml @@ -19,12 +19,13 @@ atl_product_base_url: "{{ atl_release_base_url }}/{{ atl_product_family }}/downl atl_product_download_url: "{{ atl_product_base_url }}/atlassian-{{ atl_download_edition | default(atl_product_edition) }}-{{ atl_product_version }}{{ atl_download_suffix }}" atl_product_download_filename: "{{ atl_download_edition | default(atl_product_edition) }}.{{ atl_product_version }}{{ atl_download_suffix }}" -atl_product_download: "{{ atl_installer_temp }}/{{ atl_product_download_filename }}" +atl_product_temp_download: "{{ atl_installer_temp }}/{{ atl_product_download_filename }}" atl_product_varfile: "{{ atl_installer_temp }}/{{ atl_product_family }}.varfile" atl_product_home_shared_download_dir: "{{ atl_product_home_shared }}/downloads" 
atl_product_home_shared_download: "{{ atl_product_home_shared_download_dir }}/{{ atl_product_download_filename }}" -atl_product_home_shared_download_lockdir: "{{ atl_product_home_shared_download }}_downloaded" +atl_product_home_shared_lockdir_moving: "{{ atl_product_home_shared_download }}_moving" +atl_product_home_shared_lockdir_downloaded: "{{ atl_product_home_shared_download }}_downloaded" atl_marketplace_base: "https://marketplace.atlassian.com" atl_servicedesk_latest_url: "https://marketplace.atlassian.com/rest/2/products/key/jira-servicedesk/versions/latest" diff --git a/roles/product_install/tasks/main.yml b/roles/product_install/tasks/main.yml index 8a90eb3..0d78a2c 100644 --- a/roles/product_install/tasks/main.yml +++ b/roles/product_install/tasks/main.yml @@ -132,73 +132,52 @@ # To prevent a race condition with multiple downloads at the same time # a directory is used as a lockfile (atomic operation). -- name: Assume temp binary should be downloaded +- name: Assume temp binary should be downloaded and used set_fact: download_binary: true + atl_product_download: "{{ atl_product_temp_download }}" # Check for product installer in home_shared and lockdir to determine # if it needs to be downloaded again. 
-- name: Check download lock directory exists +- name: Check moving lock directory does not exist stat: - path: "{{ atl_product_home_shared_download_lockdir }}" - register: download_lockdir + path: "{{ atl_product_home_shared_moving_lock }}" + register: moving_lock + +- name: Check downloaded lock directory exists + stat: + path: "{{ atl_product_home_shared_downloaded_lock }}" + register: downloaded_lock - name: Check for presence of product installer in home_shared stat: path: "{{ atl_product_home_shared_download }}" - register: home_shared_downloaded + register: home_shared_download # If binary exists and lockdir exists use this binary instead -- name: Check Lock Directory and binary exists on shared_home +- name: Check lock directory and binary exists on shared_home set_fact: download_binary: false + atl_product_download: "{{ atl_product_home_shared_download }}" when: - - home_shared_downloaded.stat.exists - - download_lockdir.stat.isdir is defined - - download_lockdir.stat.isdir - -# If the binary was never installed, download it -- name: "Download product installer and move to shared directory" - block: - - - name: Fetch product installer - get_url: - url: "{{ atl_product_download_url }}" - dest: "{{ atl_product_download }}" - mode: 0755 - force: false - register: atl_product_downloaded - - - name: Remove lockdir to prevent nodes relying on binary when copying - file: - path: "{{ atl_product_home_shared_download_lockdir }}" - state: absent - when: atl_product_downloaded is succeeded - register: lockdir_removed - - - name: Copy temp installer to home_shared - copy: - src: "{{ atl_product_download }}" - dest: "{{ atl_product_home_shared_download }}" - remote_src: true - when: lockdir_removed is succeeded - register: copied - - - name: Delete old temp installer - file: - path: "{{ atl_product_download }}" - state: absent - when: copied is succeeded - register: temp_deleted - - - name: Create lockdir once product installer downloaded and moved - file: - path: "{{ 
atl_product_home_shared_download_lockdir }}" - state: directory - when: temp_deleted is succeeded + - home_shared_download.stat.exists + - downloaded_lock.stat.isdir is defined + - downloaded_lock.stat.isdir + - ( moving_lock.stat.isdir is not defined or moving_lock.stat.isdir == False ) +# If the binary was never installed, download it to temp location +- name: Installer not on home_shared. Fetch it. + get_url: + url: "{{ atl_product_download_url }}" + dest: "{{ atl_product_temp_download }}" + mode: 0755 + force: false + register: atl_product_downloaded when: download_binary +# If product installer was fetched to temp, install from there +# If product installer was pre-downloaded on shared_home, install from there +# This is determined by {{ atl_product_download }} variable - name: Unpack the downloaded application depending on format include_tasks: "unpack_{{ atl_download_format }}.yml" @@ -208,3 +187,62 @@ dest: "{{ atl_product_installation_current }}" state: link force: true + +# # Product is installed. If the following are true, move to home_shared +# # 1. This node just downloaded binary. +# # 2. Another node is not already moving into place. 
+# - name: "Move product installer" +# block: + +# - name: Check moving lock directory does not exist +# stat: +# path: "{{ atl_product_home_shared_moving_lock }}" +# register: moving_lock + +# - name: Check downloaded lock directory exists +# stat: +# path: "{{ atl_product_home_shared_downloaded_lock }}" +# register: downloaded_lock + +# - name: Check for presence of product installer in home_shared +# stat: +# path: "{{ atl_product_home_shared_download }}" +# register: home_shared_download + + + + + + + +# - name: Remove lockdir to prevent nodes relying on binary when copying +# file: +# path: "{{ atl_product_home_shared_download_lockdir }}" +# state: absent +# when: atl_product_downloaded is succeeded +# register: lockdir_removed + +# - name: Copy temp installer to home_shared +# copy: +# src: "{{ atl_product_download }}" +# dest: "{{ atl_product_home_shared_download }}" +# remote_src: true +# when: lockdir_removed is succeeded +# register: copied + +# - name: Delete old temp installer +# file: +# path: "{{ atl_product_download }}" +# state: absent +# when: copied is succeeded +# register: temp_deleted + +# - name: Create lockdir once product installer downloaded and moved +# file: +# path: "{{ atl_product_home_shared_download_lockdir }}" +# state: directory +# when: temp_deleted is succeeded + + + + diff --git a/roles/product_install/tasks/unpack_installer.yml b/roles/product_install/tasks/unpack_installer.yml index 11d37e3..f340463 100644 --- a/roles/product_install/tasks/unpack_installer.yml +++ b/roles/product_install/tasks/unpack_installer.yml @@ -10,8 +10,10 @@ # actions. For example, if root and the 'jira' user exists then it # will create 'jira1'; this potentially creates idempotency/upgrade # issues down the line. +# The variable {{ atl_product_download }} will be on temp for first nodes and shared_home for +# subsequent nodes. 
- name: Run the installer - command: /bin/sh "{{ atl_product_home_shared_download }}" -q -varfile "{{ atl_product_varfile }}" + command: /bin/sh "{{ atl_product_download }}" -q -varfile "{{ atl_product_varfile }}" args: creates: "{{ atl_product_installation_versioned }}/.install4j/" become: true From 894501e9d191feb7b069b15b1c45bac5c3bcd38d Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Fri, 11 Oct 2019 14:32:50 +1100 Subject: [PATCH 48/93] ITOPSENG-164 Additional default variables --- roles/product_install/defaults/main.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/roles/product_install/defaults/main.yml b/roles/product_install/defaults/main.yml index 8798ac7..61d8d04 100644 --- a/roles/product_install/defaults/main.yml +++ b/roles/product_install/defaults/main.yml @@ -24,8 +24,8 @@ atl_product_varfile: "{{ atl_installer_temp }}/{{ atl_product_family }}.varfile" atl_product_home_shared_download_dir: "{{ atl_product_home_shared }}/downloads" atl_product_home_shared_download: "{{ atl_product_home_shared_download_dir }}/{{ atl_product_download_filename }}" -atl_product_home_shared_lockdir_moving: "{{ atl_product_home_shared_download }}_moving" -atl_product_home_shared_lockdir_downloaded: "{{ atl_product_home_shared_download }}_downloaded" +atl_product_home_shared_moving_lock: "{{ atl_product_home_shared_download }}_moving" +atl_product_home_shared_downloaded_lock: "{{ atl_product_home_shared_download }}_downloaded" atl_marketplace_base: "https://marketplace.atlassian.com" atl_servicedesk_latest_url: "https://marketplace.atlassian.com/rest/2/products/key/jira-servicedesk/versions/latest" From b73381e907d125f304efe65107ff20443db082b9 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Fri, 11 Oct 2019 14:52:35 +1100 Subject: [PATCH 49/93] ITOPSENG-164 Further testing required for logic change --- roles/product_install/tasks/main.yml | 129 +++++++++++++++------------ 1 file changed, 73 insertions(+), 56 deletions(-) diff --git 
a/roles/product_install/tasks/main.yml b/roles/product_install/tasks/main.yml index 0d78a2c..a23f98b 100644 --- a/roles/product_install/tasks/main.yml +++ b/roles/product_install/tasks/main.yml @@ -130,26 +130,27 @@ # shared home to ensure all subsequent nodes have access # to the same specific version binary. # To prevent a race condition with multiple downloads at the same time -# a directory is used as a lockfile (atomic operation). +# a directory is used as a lockfile (atomic operation) when moving binary. -- name: Assume temp binary should be downloaded and used +- name: Set assumptions to avoid race condition set_fact: download_binary: true + move_binary: false atl_product_download: "{{ atl_product_temp_download }}" # Check for product installer in home_shared and lockdir to determine # if it needs to be downloaded again. -- name: Check moving lock directory does not exist +- name: Check for moving lock directory stat: path: "{{ atl_product_home_shared_moving_lock }}" register: moving_lock -- name: Check downloaded lock directory exists +- name: Check for downloaded lock directory stat: path: "{{ atl_product_home_shared_downloaded_lock }}" register: downloaded_lock -- name: Check for presence of product installer in home_shared +- name: Check for product installer in home_shared stat: path: "{{ atl_product_home_shared_download }}" register: home_shared_download @@ -188,60 +189,76 @@ state: link force: true -# # Product is installed. If the following are true, move to home_shared -# # 1. This node just downloaded binary. -# # 2. Another node is not already moving into place. -# - name: "Move product installer" -# block: +# Product is installed. If the following are true, move to home_shared +# 1. This node just downloaded binary. +# 2. Another node is not already moving into place. 
+- name: "Move product installer" + block: -# - name: Check moving lock directory does not exist -# stat: -# path: "{{ atl_product_home_shared_moving_lock }}" -# register: moving_lock + - name: Check again for moving lock directory + stat: + path: "{{ atl_product_home_shared_moving_lock }}" + register: moving_lock_2 -# - name: Check downloaded lock directory exists -# stat: -# path: "{{ atl_product_home_shared_downloaded_lock }}" -# register: downloaded_lock + - name: Check again for downloaded lock directory + stat: + path: "{{ atl_product_home_shared_downloaded_lock }}" + register: downloaded_lock_2 -# - name: Check for presence of product installer in home_shared -# stat: -# path: "{{ atl_product_home_shared_download }}" -# register: home_shared_download - - - - - - - -# - name: Remove lockdir to prevent nodes relying on binary when copying -# file: -# path: "{{ atl_product_home_shared_download_lockdir }}" -# state: absent -# when: atl_product_downloaded is succeeded -# register: lockdir_removed - -# - name: Copy temp installer to home_shared -# copy: -# src: "{{ atl_product_download }}" -# dest: "{{ atl_product_home_shared_download }}" -# remote_src: true -# when: lockdir_removed is succeeded -# register: copied - -# - name: Delete old temp installer -# file: -# path: "{{ atl_product_download }}" -# state: absent -# when: copied is succeeded -# register: temp_deleted - -# - name: Create lockdir once product installer downloaded and moved -# file: -# path: "{{ atl_product_home_shared_download_lockdir }}" -# state: directory -# when: temp_deleted is succeeded + - name: Check again for product installer in home_shared + stat: + path: "{{ atl_product_home_shared_download }}" + register: home_shared_download_2 + + # If binary exists and lockdir exists use this binary instead + - name: Check lock directory and binary exists on shared_home + set_fact: + move_binary: true + atl_product_download: "{{ atl_product_home_shared_download }}" + when: + - 
home_shared_download.stat.exists == False + - ( downloaded_lock.stat.isdir is not defined or downloaded_lock.stat.isdir == False ) + - ( moving_lock.stat.isdir is not defined or moving_lock.stat.isdir == False ) + + - name: Create moving_lock to ensure other nodes skip + file: + path: "{{ atl_product_home_shared_moving_lock }}" + state: directory + when: move_binary + register: moving_lock_created + + - name: Copy temp installer to home_shared + copy: + src: "{{ atl_product_temp_download }}" + dest: "{{ atl_product_home_shared_download }}" + remote_src: true + when: moving_lock_created is succeeded + register: copied + + - name: Create downloaded_lock once product installer downloaded and copied + file: + path: "{{ atl_product_home_shared_downloaded_lock }}" + state: directory + when: copied is succeeded + register: downloaded_lock_created + + - name: Remove moving_lock to show that binary is completed + file: + path: "{{ atl_product_home_shared_moving_lock }}" + state: absent + when: + - downloaded_lock_created is succeeded + - copied is succeeded + register: moving_lock_removed + + - name: Delete old temp installer + file: + path: "{{ atl_product_temp_download }}" + state: absent + when: moving_lock_removed is succeeded + register: temp_deleted + + when: move_binary From 5d10948feb5ece672309e3dc58a458386a93180f Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Fri, 11 Oct 2019 14:54:59 +1100 Subject: [PATCH 50/93] ITOPSENG-164 Further testing required for logic change --- roles/product_install/tasks/main.yml | 66 ++++++++++++++-------------- 1 file changed, 33 insertions(+), 33 deletions(-) diff --git a/roles/product_install/tasks/main.yml b/roles/product_install/tasks/main.yml index a23f98b..d6c7b61 100644 --- a/roles/product_install/tasks/main.yml +++ b/roles/product_install/tasks/main.yml @@ -220,43 +220,43 @@ - ( downloaded_lock.stat.isdir is not defined or downloaded_lock.stat.isdir == False ) - ( moving_lock.stat.isdir is not defined or 
moving_lock.stat.isdir == False ) - - name: Create moving_lock to ensure other nodes skip - file: - path: "{{ atl_product_home_shared_moving_lock }}" - state: directory - when: move_binary - register: moving_lock_created + - name: Create moving_lock to ensure other nodes skip + file: + path: "{{ atl_product_home_shared_moving_lock }}" + state: directory + when: move_binary + register: moving_lock_created - - name: Copy temp installer to home_shared - copy: - src: "{{ atl_product_temp_download }}" - dest: "{{ atl_product_home_shared_download }}" - remote_src: true - when: moving_lock_created is succeeded - register: copied + - name: Copy temp installer to home_shared + copy: + src: "{{ atl_product_temp_download }}" + dest: "{{ atl_product_home_shared_download }}" + remote_src: true + when: moving_lock_created is succeeded + register: copied - - name: Create downloaded_lock once product installer downloaded and copied - file: - path: "{{ atl_product_home_shared_downloaded_lock }}" - state: directory - when: copied is succeeded - register: downloaded_lock_created + - name: Create downloaded_lock once product installer downloaded and copied + file: + path: "{{ atl_product_home_shared_downloaded_lock }}" + state: directory + when: copied is succeeded + register: downloaded_lock_created - - name: Remove moving_lock to show that binary is completed - file: - path: "{{ atl_product_home_shared_moving_lock }}" - state: absent - when: - - downloaded_lock_created is succeeded - - copied is succeeded - register: moving_lock_removed + - name: Remove moving_lock to show that binary is completed + file: + path: "{{ atl_product_home_shared_moving_lock }}" + state: absent + when: + - downloaded_lock_created is succeeded + - copied is succeeded + register: moving_lock_removed - - name: Delete old temp installer - file: - path: "{{ atl_product_temp_download }}" - state: absent - when: moving_lock_removed is succeeded - register: temp_deleted + - name: Delete old temp installer + file: 
+ path: "{{ atl_product_temp_download }}" + state: absent + when: moving_lock_removed is succeeded + register: temp_deleted when: move_binary From 0711c75dabab15dbf383ce45a10c8bbe431cf3e2 Mon Sep 17 00:00:00 2001 From: Varun Arbatti Date: Fri, 11 Oct 2019 05:04:22 +0000 Subject: [PATCH 51/93] Merged in DCD-697-additional-cw-logs (pull request #31) DCD-697: Adds ansible and cfn logs to cloudwatch * DCD-697: Adds ansible and cfn logs to cloudwatch * DCD-697: Adds provisioning logs to a separate log group. Adds a test to ensure provisioning logs exist * DCD-697: Refers to files, not directory, for provisioning log groups * DCD-697: Adds a comma between log groups * DCD-697: Removes redundant comma between the 2 log groups Approved-by: Steve Smith Approved-by: Adam Brokes --- group_vars/aws_node_local.yml | 4 ++++ roles/aws_common/defaults/main.yml | 1 + roles/aws_common/molecule/default/tests/test_default.py | 1 + .../aws_common/templates/amazon-cloudwatch-agent.json.j2 | 9 ++++++++- 4 files changed, 14 insertions(+), 1 deletion(-) diff --git a/group_vars/aws_node_local.yml b/group_vars/aws_node_local.yml index 69285f9..aff781d 100644 --- a/group_vars/aws_node_local.yml +++ b/group_vars/aws_node_local.yml @@ -45,6 +45,10 @@ atl_product_log_locations: - "{{ atl_product_home }}/log" crowd: [] +atl_provisioner_log_locations: + - "/var/log/ansible-bootstrap.log" + - "/var/log/cfn-*.log" + # The following are imports from the environment. These are generally # set in /etc/atl by the CloudFormation template and sourced before # Ansible is run.
See bin/ansible-with-atl-env for a convenient wrapper diff --git a/roles/aws_common/defaults/main.yml b/roles/aws_common/defaults/main.yml index 3cb3b65..d43b4c4 100644 --- a/roles/aws_common/defaults/main.yml +++ b/roles/aws_common/defaults/main.yml @@ -11,3 +11,4 @@ atl_aws_enable_cloudwatch_logs: false atl_aws_agent_restart: true atl_aws_log_group: "{{ atl_product_edition }}-{{ atl_aws_stack_name }}" +atl_aws_provisioning_log_group: "{{ atl_aws_log_group }}-provisioning" diff --git a/roles/aws_common/molecule/default/tests/test_default.py b/roles/aws_common/molecule/default/tests/test_default.py index 53261bb..f605623 100644 --- a/roles/aws_common/molecule/default/tests/test_default.py +++ b/roles/aws_common/molecule/default/tests/test_default.py @@ -20,6 +20,7 @@ def test_package_exes(host, exe): def test_service_file(host): f = host.file('/opt/aws/amazon-cloudwatch-agent/etc/amazon-cloudwatch-agent.json') assert f.contains('"log_group_name": "jira-software-MY_STACK"') + assert f.contains('"log_group_name": "jira-software-MY_STACK-provisioning"') assert f.user == 'root' assert f.group == 'root' assert f.mode == 0o0644 diff --git a/roles/aws_common/templates/amazon-cloudwatch-agent.json.j2 b/roles/aws_common/templates/amazon-cloudwatch-agent.json.j2 index 17b31ca..81caac2 100644 --- a/roles/aws_common/templates/amazon-cloudwatch-agent.json.j2 +++ b/roles/aws_common/templates/amazon-cloudwatch-agent.json.j2 @@ -19,7 +19,14 @@ "log_stream_name": "{instance_id}" } {% endfor %} - + {% for path in atl_provisioner_log_locations %} + {{ comma() }} + { + "file_path": "{{ path }}", + "log_group_name": "{{ atl_aws_provisioning_log_group }}", + "log_stream_name": "{instance_id}" + } + {% endfor %} ] } } From fbb9316c26eea2383df30345a34b4e6b69da4c69 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Fri, 11 Oct 2019 16:37:28 +1100 Subject: [PATCH 52/93] ITOPSENG-164 Further testing required for logic change --- roles/product_install/tasks/main.yml | 18 ++++++++++++------ 1 
file changed, 12 insertions(+), 6 deletions(-) diff --git a/roles/product_install/tasks/main.yml b/roles/product_install/tasks/main.yml index d6c7b61..f12f987 100644 --- a/roles/product_install/tasks/main.yml +++ b/roles/product_install/tasks/main.yml @@ -189,10 +189,12 @@ state: link force: true -# Product is installed. If the following are true, move to home_shared +# Temp product was downloaded and installed. +# If the following conditions are true, move to home_shared # 1. This node just downloaded binary. # 2. Another node is not already moving into place. -- name: "Move product installer" +# 3. The binary is downloaded and lockdir in place. +- name: "Check move product installer" block: - name: Check again for moving lock directory @@ -214,11 +216,15 @@ - name: Check lock directory and binary exists on shared_home set_fact: move_binary: true - atl_product_download: "{{ atl_product_home_shared_download }}" when: - - home_shared_download.stat.exists == False - - ( downloaded_lock.stat.isdir is not defined or downloaded_lock.stat.isdir == False ) - - ( moving_lock.stat.isdir is not defined or moving_lock.stat.isdir == False ) + - ( home_shared_download.stat.exists == False or + downloaded_lock.stat.isdir is not defined or downloaded_lock.stat.isdir == False ) + - ( moving_lock.stat.isdir is not defined or moving_lock.stat.isdir == False ) + + when: download_binary + +- name: "Move product installer if required" + block: - name: Create moving_lock to ensure other nodes skip file: From 0a6501d781fe78e17c44982730006f436da3a3dd Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Mon, 14 Oct 2019 17:29:16 +1100 Subject: [PATCH 53/93] ITOPSENG-164 Debug some changes to logic --- roles/product_install/tasks/main.yml | 192 +++++++++++++++------------ 1 file changed, 104 insertions(+), 88 deletions(-) diff --git a/roles/product_install/tasks/main.yml b/roles/product_install/tasks/main.yml index f12f987..040b2ef 100644 --- a/roles/product_install/tasks/main.yml +++ 
b/roles/product_install/tasks/main.yml @@ -138,17 +138,11 @@ move_binary: false atl_product_download: "{{ atl_product_temp_download }}" -# Check for product installer in home_shared and lockdir to determine -# if it needs to be downloaded again. -- name: Check for moving lock directory +# Check for pre-downloaded binary on shared_home and completed lock dir. +- name: Check for completed lock directory stat: - path: "{{ atl_product_home_shared_moving_lock }}" - register: moving_lock - -- name: Check for downloaded lock directory - stat: - path: "{{ atl_product_home_shared_downloaded_lock }}" - register: downloaded_lock + path: "{{ atl_product_home_shared_completed_lock }}" + register: completed_lock - name: Check for product installer in home_shared stat: @@ -162,9 +156,8 @@ atl_product_download: "{{ atl_product_home_shared_download }}" when: - home_shared_download.stat.exists - - downloaded_lock.stat.isdir is defined - - downloaded_lock.stat.isdir - - ( moving_lock.stat.isdir is not defined or moving_lock.stat.isdir == False ) + - completed_lock.stat.isdir is defined + - completed_lock.stat.isdir # If the binary was never installed, download it to temp location - name: Installer not on home_shared. Fetch it. @@ -173,98 +166,121 @@ dest: "{{ atl_product_temp_download }}" mode: 0755 force: false - register: atl_product_downloaded + register: atl_product_completed when: download_binary -# If product installer was fetched to temp, install from there -# If product installer was pre-downloaded on shared_home, install from there -# This is determined by {{ atl_product_download }} variable -- name: Unpack the downloaded application depending on format - include_tasks: "unpack_{{ atl_download_format }}.yml" +# If product installer was fetched +# Make the moving directory +# - failure, continue and install from temp +# - success, move binary and install from shared_home -- name: Symlink the installed version to current +- name: Create moving_lock. 
file: - src: "{{ atl_product_installation_versioned }}" - dest: "{{ atl_product_installation_current }}" - state: link - force: true + path: "{{ atl_product_home_shared_moving_lock }}" + state: directory + when: download_binary is succeeded + register: moving_lock_created -# Temp product was downloaded and installed. -# If the following conditions are true, move to home_shared -# 1. This node just downloaded binary. -# 2. Another node is not already moving into place. -# 3. The binary is downloaded and lockdir in place. -- name: "Check move product installer" - block: +- name: Debug Scenario A - lock created + debug: lock created + when: moving_lock_created is succeeded - - name: Check again for moving lock directory - stat: - path: "{{ atl_product_home_shared_moving_lock }}" - register: moving_lock_2 +- name: Debug Scenario B - lock cannot created + debug: lock not created + when: moving_lock_created is failed + + + + + +# # If product installer was pre-downloaded on shared_home, install from there +# # This is determined by {{ atl_product_download }} variable +# - name: Unpack the downloaded application depending on format +# include_tasks: "unpack_{{ atl_download_format }}.yml" + +# - name: Symlink the installed version to current +# file: +# src: "{{ atl_product_installation_versioned }}" +# dest: "{{ atl_product_installation_current }}" +# state: link +# force: true + +# # Temp product was downloaded and installed. +# # If the following conditions are true, move to home_shared +# # 1. This node just downloaded binary. +# # 2. Another node is not already moving into place. +# # 3. The binary is downloaded and lockdir in place. 
+# - name: "Check move product installer" +# block: + +# - name: Check again for moving lock directory +# stat: +# path: "{{ atl_product_home_shared_moving_lock }}" +# register: moving_lock_2 - - name: Check again for downloaded lock directory - stat: - path: "{{ atl_product_home_shared_downloaded_lock }}" - register: downloaded_lock_2 +# - name: Check again for completed lock directory +# stat: +# path: "{{ atl_product_home_shared_completed_lock }}" +# register: completed_lock_2 - - name: Check again for product installer in home_shared - stat: - path: "{{ atl_product_home_shared_download }}" - register: home_shared_download_2 +# - name: Check again for product installer in home_shared +# stat: +# path: "{{ atl_product_home_shared_download }}" +# register: home_shared_download_2 - # If binary exists and lockdir exists use this binary instead - - name: Check lock directory and binary exists on shared_home - set_fact: - move_binary: true - when: - - ( home_shared_download.stat.exists == False or - downloaded_lock.stat.isdir is not defined or downloaded_lock.stat.isdir == False ) - - ( moving_lock.stat.isdir is not defined or moving_lock.stat.isdir == False ) +# # If binary exists and lockdir exists use this binary instead +# - name: Check lock directory and binary exists on shared_home +# set_fact: +# move_binary: true +# when: +# - ( home_shared_download.stat.exists == False or +# completed_lock.stat.isdir is not defined or completed_lock.stat.isdir == False ) +# - ( moving_lock.stat.isdir is not defined or moving_lock.stat.isdir == False ) - when: download_binary +# when: download_binary -- name: "Move product installer if required" - block: +# - name: "Move product installer if required" +# block: - - name: Create moving_lock to ensure other nodes skip - file: - path: "{{ atl_product_home_shared_moving_lock }}" - state: directory - when: move_binary - register: moving_lock_created +# - name: Create moving_lock to ensure other nodes skip +# file: +# path: "{{ 
atl_product_home_shared_moving_lock }}" +# state: directory +# when: move_binary +# register: moving_lock_created - - name: Copy temp installer to home_shared - copy: - src: "{{ atl_product_temp_download }}" - dest: "{{ atl_product_home_shared_download }}" - remote_src: true - when: moving_lock_created is succeeded - register: copied +# - name: Copy temp installer to home_shared +# copy: +# src: "{{ atl_product_temp_download }}" +# dest: "{{ atl_product_home_shared_download }}" +# remote_src: true +# when: moving_lock_created is succeeded +# register: copied - - name: Create downloaded_lock once product installer downloaded and copied - file: - path: "{{ atl_product_home_shared_downloaded_lock }}" - state: directory - when: copied is succeeded - register: downloaded_lock_created +# - name: Create completed_lock once product installer downloaded and copied +# file: +# path: "{{ atl_product_home_shared_completed_lock }}" +# state: directory +# when: copied is succeeded +# register: completed_lock_created - - name: Remove moving_lock to show that binary is completed - file: - path: "{{ atl_product_home_shared_moving_lock }}" - state: absent - when: - - downloaded_lock_created is succeeded - - copied is succeeded - register: moving_lock_removed +# - name: Remove moving_lock to show that binary is completed +# file: +# path: "{{ atl_product_home_shared_moving_lock }}" +# state: absent +# when: +# - completed_lock_created is succeeded +# - copied is succeeded +# register: moving_lock_removed - - name: Delete old temp installer - file: - path: "{{ atl_product_temp_download }}" - state: absent - when: moving_lock_removed is succeeded - register: temp_deleted +# - name: Delete old temp installer +# file: +# path: "{{ atl_product_temp_download }}" +# state: absent +# when: moving_lock_removed is succeeded +# register: temp_deleted - when: move_binary +# when: move_binary From 70a48e58c4a2d178b1043b4218bc9e6b15a89823 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Mon, 14 
Oct 2019 17:34:05 +1100 Subject: [PATCH 54/93] ITOPSENG-164 Debug some changes to logic --- roles/product_install/defaults/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/product_install/defaults/main.yml b/roles/product_install/defaults/main.yml index 61d8d04..931e639 100644 --- a/roles/product_install/defaults/main.yml +++ b/roles/product_install/defaults/main.yml @@ -25,7 +25,7 @@ atl_product_varfile: "{{ atl_installer_temp }}/{{ atl_product_family }}.varfile" atl_product_home_shared_download_dir: "{{ atl_product_home_shared }}/downloads" atl_product_home_shared_download: "{{ atl_product_home_shared_download_dir }}/{{ atl_product_download_filename }}" atl_product_home_shared_moving_lock: "{{ atl_product_home_shared_download }}_moving" -atl_product_home_shared_downloaded_lock: "{{ atl_product_home_shared_download }}_downloaded" +atl_product_home_shared_completed_lock: "{{ atl_product_home_shared_download }}_completed" atl_marketplace_base: "https://marketplace.atlassian.com" atl_servicedesk_latest_url: "https://marketplace.atlassian.com/rest/2/products/key/jira-servicedesk/versions/latest" From 9dea02808c49ad56d52b73b3bbc8c9f237efc351 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Tue, 15 Oct 2019 10:16:52 +1100 Subject: [PATCH 55/93] ITOPSENG-164 Debug some changes to logic --- roles/product_install/tasks/main.yml | 32 +++++++++++++++++----------- 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/roles/product_install/tasks/main.yml b/roles/product_install/tasks/main.yml index 040b2ef..54e74c1 100644 --- a/roles/product_install/tasks/main.yml +++ b/roles/product_install/tasks/main.yml @@ -169,25 +169,33 @@ register: atl_product_completed when: download_binary -# If product installer was fetched -# Make the moving directory -# - failure, continue and install from temp -# - success, move binary and install from shared_home - +# If product installer was fetched make the moving directory - name: Create moving_lock. 
file: path: "{{ atl_product_home_shared_moving_lock }}" state: directory - when: download_binary is succeeded + when: + - atl_product_completed is succeeded register: moving_lock_created -- name: Debug Scenario A - lock created - debug: lock created - when: moving_lock_created is succeeded +# Directory lock was created by this run +# - Move binary +- name: Move binary Scenario - lock created by this run + debug: + msg: lock created + set_fact: + move_binary: true + when: + - moving_lock_created is succeeded + - moving_lock_created.changed == True -- name: Debug Scenario B - lock cannot created - debug: lock not created - when: moving_lock_created is failed +# Directory lock was either already created or could not be +# - Continue and install from temp +- name: Continue Scenario - lock cannot be created or created by previous run + debug: + msg: lock not created + when: moving_lock_created is not succeeded or + ( moving_lock_created is succeeded and moving_lock_created.changed == False ) From eedea5e32647adba15bd004e7b509f7fad7e946e Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Tue, 15 Oct 2019 10:23:06 +1100 Subject: [PATCH 56/93] ITOPSENG-164 Debug some changes to logic --- roles/product_install/tasks/main.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/roles/product_install/tasks/main.yml b/roles/product_install/tasks/main.yml index 54e74c1..ad490f6 100644 --- a/roles/product_install/tasks/main.yml +++ b/roles/product_install/tasks/main.yml @@ -181,8 +181,6 @@ # Directory lock was created by this run # - Move binary - name: Move binary Scenario - lock created by this run - debug: - msg: lock created set_fact: move_binary: true when: From 2ac847dd3ab88ceb54fdeb3dc6c8a19082297b7f Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Tue, 15 Oct 2019 10:28:48 +1100 Subject: [PATCH 57/93] ITOPSENG-164 Debug some changes to logic --- roles/product_install/tasks/main.yml | 74 ++++++++++++++++------------ 1 file changed, 42 insertions(+), 32 deletions(-) diff 
--git a/roles/product_install/tasks/main.yml b/roles/product_install/tasks/main.yml index ad490f6..288b0f6 100644 --- a/roles/product_install/tasks/main.yml +++ b/roles/product_install/tasks/main.yml @@ -196,6 +196,48 @@ ( moving_lock_created is succeeded and moving_lock_created.changed == False ) +# Move binary block +- name: Move product installer to home_shared + block: + + - name: Copy temp installer to home_shared + copy: + src: "{{ atl_product_temp_download }}" + dest: "{{ atl_product_home_shared_download }}" + remote_src: true + when: + - moving_lock_created is succeeded + - moving_lock_created.changed == True + register: copied + + - name: Create completed_lock once product installer downloaded and copied + file: + path: "{{ atl_product_home_shared_completed_lock }}" + state: directory + when: copied is succeeded + register: completed_lock_created + + - name: Remove moving_lock to show that binary is completed + file: + path: "{{ atl_product_home_shared_moving_lock }}" + state: absent + when: + - completed_lock_created is succeeded + - copied is succeeded + register: moving_lock_removed + + - name: Delete old temp installer + file: + path: "{{ atl_product_temp_download }}" + state: absent + when: moving_lock_removed is succeeded + register: temp_deleted + + when: move_binary + + + + @@ -255,38 +297,6 @@ # when: move_binary # register: moving_lock_created -# - name: Copy temp installer to home_shared -# copy: -# src: "{{ atl_product_temp_download }}" -# dest: "{{ atl_product_home_shared_download }}" -# remote_src: true -# when: moving_lock_created is succeeded -# register: copied - -# - name: Create completed_lock once product installer downloaded and copied -# file: -# path: "{{ atl_product_home_shared_completed_lock }}" -# state: directory -# when: copied is succeeded -# register: completed_lock_created - -# - name: Remove moving_lock to show that binary is completed -# file: -# path: "{{ atl_product_home_shared_moving_lock }}" -# state: absent -# when: -# 
- completed_lock_created is succeeded -# - copied is succeeded -# register: moving_lock_removed - -# - name: Delete old temp installer -# file: -# path: "{{ atl_product_temp_download }}" -# state: absent -# when: moving_lock_removed is succeeded -# register: temp_deleted - -# when: move_binary From 46e3665cd561cd0b4f0425ce0ff18b996b0fcab2 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Tue, 15 Oct 2019 10:50:48 +1100 Subject: [PATCH 58/93] ITOPSENG-164 Debug some changes to logic --- roles/product_install/tasks/main.yml | 72 +++++++++++++++------------- 1 file changed, 38 insertions(+), 34 deletions(-) diff --git a/roles/product_install/tasks/main.yml b/roles/product_install/tasks/main.yml index 288b0f6..3665698 100644 --- a/roles/product_install/tasks/main.yml +++ b/roles/product_install/tasks/main.yml @@ -159,42 +159,46 @@ - completed_lock.stat.isdir is defined - completed_lock.stat.isdir -# If the binary was never installed, download it to temp location -- name: Installer not on home_shared. Fetch it. - get_url: - url: "{{ atl_product_download_url }}" - dest: "{{ atl_product_temp_download }}" - mode: 0755 - force: false - register: atl_product_completed +# Fetch binary if required +- name: download_binary is true so fetch and do all the things + block: + + - name: Fetch binary + get_url: + url: "{{ atl_product_download_url }}" + dest: "{{ atl_product_temp_download }}" + mode: 0755 + force: false + register: atl_product_completed + + # If product installer was fetched make the moving directory + - name: Create moving_lock. 
+ file: + path: "{{ atl_product_home_shared_moving_lock }}" + state: directory + when: + - atl_product_completed is succeeded + register: moving_lock_created + + # Directory lock was created by this run + # - Move binary + - name: Move binary Scenario - lock created by this run + set_fact: + move_binary: true + when: + - moving_lock_created is succeeded + - moving_lock_created.changed == True + + # Directory lock was either already created or could not be + # - Continue and install from temp + - name: Continue Scenario - lock cannot be created or created by previous run + debug: + msg: lock not created + when: moving_lock_created is not succeeded or + ( moving_lock_created is succeeded and moving_lock_created.changed == False ) + when: download_binary -# If product installer was fetched make the moving directory -- name: Create moving_lock. - file: - path: "{{ atl_product_home_shared_moving_lock }}" - state: directory - when: - - atl_product_completed is succeeded - register: moving_lock_created - -# Directory lock was created by this run -# - Move binary -- name: Move binary Scenario - lock created by this run - set_fact: - move_binary: true - when: - - moving_lock_created is succeeded - - moving_lock_created.changed == True - -# Directory lock was either already created or could not be -# - Continue and install from temp -- name: Continue Scenario - lock cannot be created or created by previous run - debug: - msg: lock not created - when: moving_lock_created is not succeeded or - ( moving_lock_created is succeeded and moving_lock_created.changed == False ) - # Move binary block - name: Move product installer to home_shared From b9c5389ef5fc33758ca5eeeed959940475c921e6 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Tue, 15 Oct 2019 10:53:25 +1100 Subject: [PATCH 59/93] ITOPSENG-164 Debug some changes to logic --- roles/product_install/tasks/main.yml | 77 ++++------------------------ 1 file changed, 11 insertions(+), 66 deletions(-) diff --git 
a/roles/product_install/tasks/main.yml b/roles/product_install/tasks/main.yml index 3665698..eba1b6b 100644 --- a/roles/product_install/tasks/main.yml +++ b/roles/product_install/tasks/main.yml @@ -199,7 +199,6 @@ when: download_binary - # Move binary block - name: Move product installer to home_shared block: @@ -239,69 +238,15 @@ when: move_binary +# At this point the binary is in place and can be used to install +# The location is determined by {{ atl_product_download }} variable +# set above +- name: Unpack the downloaded application depending on format + include_tasks: "unpack_{{ atl_download_format }}.yml" - - - - - -# # If product installer was pre-downloaded on shared_home, install from there -# # This is determined by {{ atl_product_download }} variable -# - name: Unpack the downloaded application depending on format -# include_tasks: "unpack_{{ atl_download_format }}.yml" - -# - name: Symlink the installed version to current -# file: -# src: "{{ atl_product_installation_versioned }}" -# dest: "{{ atl_product_installation_current }}" -# state: link -# force: true - -# # Temp product was downloaded and installed. -# # If the following conditions are true, move to home_shared -# # 1. This node just downloaded binary. -# # 2. Another node is not already moving into place. -# # 3. The binary is downloaded and lockdir in place. 
-# - name: "Check move product installer" -# block: - -# - name: Check again for moving lock directory -# stat: -# path: "{{ atl_product_home_shared_moving_lock }}" -# register: moving_lock_2 - -# - name: Check again for completed lock directory -# stat: -# path: "{{ atl_product_home_shared_completed_lock }}" -# register: completed_lock_2 - -# - name: Check again for product installer in home_shared -# stat: -# path: "{{ atl_product_home_shared_download }}" -# register: home_shared_download_2 - -# # If binary exists and lockdir exists use this binary instead -# - name: Check lock directory and binary exists on shared_home -# set_fact: -# move_binary: true -# when: -# - ( home_shared_download.stat.exists == False or -# completed_lock.stat.isdir is not defined or completed_lock.stat.isdir == False ) -# - ( moving_lock.stat.isdir is not defined or moving_lock.stat.isdir == False ) - -# when: download_binary - -# - name: "Move product installer if required" -# block: - -# - name: Create moving_lock to ensure other nodes skip -# file: -# path: "{{ atl_product_home_shared_moving_lock }}" -# state: directory -# when: move_binary -# register: moving_lock_created - - - - - +- name: Symlink the installed version to current + file: + src: "{{ atl_product_installation_versioned }}" + dest: "{{ atl_product_installation_current }}" + state: link + force: true From 2c1ce5bc2e18b1f883a96e978576443fb8b68292 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Tue, 15 Oct 2019 10:57:51 +1100 Subject: [PATCH 60/93] ITOPSENG-164 Debug some changes to logic --- roles/product_install/tasks/main.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/roles/product_install/tasks/main.yml b/roles/product_install/tasks/main.yml index eba1b6b..30d3cce 100644 --- a/roles/product_install/tasks/main.yml +++ b/roles/product_install/tasks/main.yml @@ -163,6 +163,10 @@ - name: download_binary is true so fetch and do all the things block: + - name: debug + msg: + - Download the binary + - name: 
Fetch binary get_url: url: "{{ atl_product_download_url }}" From cc5b3c8a70662a3dee8141650770599851b5f523 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Tue, 15 Oct 2019 11:03:03 +1100 Subject: [PATCH 61/93] ITOPSENG-164 Debug some changes to logic --- roles/product_install/tasks/main.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/roles/product_install/tasks/main.yml b/roles/product_install/tasks/main.yml index 30d3cce..349df3e 100644 --- a/roles/product_install/tasks/main.yml +++ b/roles/product_install/tasks/main.yml @@ -164,8 +164,8 @@ block: - name: debug - msg: - - Download the binary + debug: + msg: Download the binary - name: Fetch binary get_url: @@ -242,6 +242,10 @@ when: move_binary +- name: debug + debug: + msg: Install from {{ atl_product_download }} + # At this point the binary is in place and can be used to install # The location is determined by {{ atl_product_download }} variable # set above From 37ae0eadc5026a0afe2dce7bc40c973351a48321 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Tue, 15 Oct 2019 11:10:47 +1100 Subject: [PATCH 62/93] ITOPSENG-164 Debug some changes to logic --- roles/product_install/tasks/main.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/roles/product_install/tasks/main.yml b/roles/product_install/tasks/main.yml index 349df3e..5da046f 100644 --- a/roles/product_install/tasks/main.yml +++ b/roles/product_install/tasks/main.yml @@ -240,6 +240,11 @@ when: moving_lock_removed is succeeded register: temp_deleted + - name: Set install to home_shared location + set_fact: + atl_product_download: "{{ atl_product_home_shared_download }}" + when: temp_deleted is succeeded + when: move_binary - name: debug From 5f2e5929ab3f48305f3d41d316a5ea95f23cf4e0 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Tue, 15 Oct 2019 15:27:41 +1100 Subject: [PATCH 63/93] ITOPSENG-164 Remove all the debug now that logic is fixed --- roles/product_install/tasks/main.yml | 32 ++++++++-------------------- 1 
file changed, 9 insertions(+), 23 deletions(-) diff --git a/roles/product_install/tasks/main.yml b/roles/product_install/tasks/main.yml index 5da046f..068a3ee 100644 --- a/roles/product_install/tasks/main.yml +++ b/roles/product_install/tasks/main.yml @@ -163,10 +163,7 @@ - name: download_binary is true so fetch and do all the things block: - - name: debug - debug: - msg: Download the binary - + # Fetch binary and copy to temp - name: Fetch binary get_url: url: "{{ atl_product_download_url }}" @@ -175,7 +172,7 @@ force: false register: atl_product_completed - # If product installer was fetched make the moving directory + # If product installer was fetched make the lock directory - name: Create moving_lock. file: path: "{{ atl_product_home_shared_moving_lock }}" @@ -184,26 +181,20 @@ - atl_product_completed is succeeded register: moving_lock_created - # Directory lock was created by this run - # - Move binary + # Directory lock was created by this run? + # If so, then set a fact intending to move binary - name: Move binary Scenario - lock created by this run set_fact: move_binary: true when: - moving_lock_created is succeeded - moving_lock_created.changed == True - - # Directory lock was either already created or could not be - # - Continue and install from temp - - name: Continue Scenario - lock cannot be created or created by previous run - debug: - msg: lock not created - when: moving_lock_created is not succeeded or - ( moving_lock_created is succeeded and moving_lock_created.changed == False ) + # Otherwise directory lock was either already created or + # could not be created. 
Fall back is to continue and install from temp when: download_binary -# Move binary block +# If the intention is to move binary to home_shared - name: Move product installer to home_shared block: @@ -247,13 +238,8 @@ when: move_binary -- name: debug - debug: - msg: Install from {{ atl_product_download }} - -# At this point the binary is in place and can be used to install -# The location is determined by {{ atl_product_download }} variable -# set above +# At this point the binary is in {{ atl_product_download }} +# (which is either on home_shared or temp) - name: Unpack the downloaded application depending on format include_tasks: "unpack_{{ atl_download_format }}.yml" From 1dedfa827e116cb4a5ea5d960857f6a7d880b106 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Wed, 16 Oct 2019 14:37:05 +1100 Subject: [PATCH 64/93] ITOPSENG-164 yamllint fixes --- roles/product_install/tasks/main.yml | 118 +++++++++++++-------------- 1 file changed, 59 insertions(+), 59 deletions(-) diff --git a/roles/product_install/tasks/main.yml b/roles/product_install/tasks/main.yml index 068a3ee..388164c 100644 --- a/roles/product_install/tasks/main.yml +++ b/roles/product_install/tasks/main.yml @@ -163,34 +163,34 @@ - name: download_binary is true so fetch and do all the things block: - # Fetch binary and copy to temp - - name: Fetch binary - get_url: - url: "{{ atl_product_download_url }}" - dest: "{{ atl_product_temp_download }}" - mode: 0755 - force: false - register: atl_product_completed + # Fetch binary and copy to temp + - name: Fetch binary + get_url: + url: "{{ atl_product_download_url }}" + dest: "{{ atl_product_temp_download }}" + mode: 0755 + force: false + register: atl_product_completed - # If product installer was fetched make the lock directory - - name: Create moving_lock. 
- file: - path: "{{ atl_product_home_shared_moving_lock }}" - state: directory - when: - - atl_product_completed is succeeded - register: moving_lock_created + # If product installer was fetched make the lock directory + - name: Create moving_lock. + file: + path: "{{ atl_product_home_shared_moving_lock }}" + state: directory + when: + - atl_product_completed is succeeded + register: moving_lock_created - # Directory lock was created by this run? - # If so, then set a fact intending to move binary - - name: Move binary Scenario - lock created by this run - set_fact: - move_binary: true - when: - - moving_lock_created is succeeded - - moving_lock_created.changed == True - # Otherwise directory lock was either already created or - # could not be created. Fall back is to continue and install from temp + # Directory lock was created by this run? + # If so, then set a fact intending to move binary + - name: Move binary Scenario - lock created by this run + set_fact: + move_binary: true + when: + - moving_lock_created is succeeded + - moving_lock_created.changed == True + # Otherwise directory lock was either already created or + # could not be created. 
Fall back is to continue and install from temp when: download_binary @@ -198,43 +198,43 @@ - name: Move product installer to home_shared block: - - name: Copy temp installer to home_shared - copy: - src: "{{ atl_product_temp_download }}" - dest: "{{ atl_product_home_shared_download }}" - remote_src: true - when: - - moving_lock_created is succeeded - - moving_lock_created.changed == True - register: copied + - name: Copy temp installer to home_shared + copy: + src: "{{ atl_product_temp_download }}" + dest: "{{ atl_product_home_shared_download }}" + remote_src: true + when: + - moving_lock_created is succeeded + - moving_lock_created.changed == True + register: copied - - name: Create completed_lock once product installer downloaded and copied - file: - path: "{{ atl_product_home_shared_completed_lock }}" - state: directory - when: copied is succeeded - register: completed_lock_created + - name: Create completed_lock once product installer downloaded and copied + file: + path: "{{ atl_product_home_shared_completed_lock }}" + state: directory + when: copied is succeeded + register: completed_lock_created - - name: Remove moving_lock to show that binary is completed - file: - path: "{{ atl_product_home_shared_moving_lock }}" - state: absent - when: - - completed_lock_created is succeeded - - copied is succeeded - register: moving_lock_removed + - name: Remove moving_lock to show that binary is completed + file: + path: "{{ atl_product_home_shared_moving_lock }}" + state: absent + when: + - completed_lock_created is succeeded + - copied is succeeded + register: moving_lock_removed - - name: Delete old temp installer - file: - path: "{{ atl_product_temp_download }}" - state: absent - when: moving_lock_removed is succeeded - register: temp_deleted + - name: Delete old temp installer + file: + path: "{{ atl_product_temp_download }}" + state: absent + when: moving_lock_removed is succeeded + register: temp_deleted - - name: Set install to home_shared location - set_fact: - 
atl_product_download: "{{ atl_product_home_shared_download }}" - when: temp_deleted is succeeded + - name: Set install to home_shared location + set_fact: + atl_product_download: "{{ atl_product_home_shared_download }}" + when: temp_deleted is succeeded when: move_binary From f24fb3fb6574b9acda4e2ccffeed557eb42063cd Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Wed, 16 Oct 2019 14:45:50 +1100 Subject: [PATCH 65/93] ITOPSENG-164 ansible-lint fixes --- roles/product_install/tasks/main.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/roles/product_install/tasks/main.yml b/roles/product_install/tasks/main.yml index 388164c..f136128 100644 --- a/roles/product_install/tasks/main.yml +++ b/roles/product_install/tasks/main.yml @@ -188,7 +188,7 @@ move_binary: true when: - moving_lock_created is succeeded - - moving_lock_created.changed == True + - moving_lock_created.changed # Otherwise directory lock was either already created or # could not be created. Fall back is to continue and install from temp @@ -205,7 +205,7 @@ remote_src: true when: - moving_lock_created is succeeded - - moving_lock_created.changed == True + - moving_lock_created.changed register: copied - name: Create completed_lock once product installer downloaded and copied From 244e6664e61e5ba9263d549cbcfb2d26d6446012 Mon Sep 17 00:00:00 2001 From: Geoff Jacobs Date: Mon, 21 Oct 2019 10:46:56 +1100 Subject: [PATCH 66/93] ITOPSENG-258 setting the Crowd node name via CATALINA_OPTS --- roles/crowd_config/tasks/main.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/roles/crowd_config/tasks/main.yml b/roles/crowd_config/tasks/main.yml index ef7dcc2..9df29b9 100644 --- a/roles/crowd_config/tasks/main.yml +++ b/roles/crowd_config/tasks/main.yml @@ -25,9 +25,9 @@ lineinfile: path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh" insertafter: "EOF" - line: 'export CATALINA_OPTS="${CATALINA_OPTS} {{ atl_catalina_opts }} {{ 
atl_catalina_opts_extra }}"' + line: 'export CATALINA_OPTS="${CATALINA_OPTS} {{ atl_catalina_opts }} {{ atl_catalina_opts_extra }} -Dcluster.node.name={{ ansible_ec2_instance_id }}-{{ ansible_ec2_local_ipv4 }}"' -- name: Set JAVA_HOME #FIXME +- name: Set JAVA_HOME lineinfile: path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh" insertafter: "EOF" From 1df244d7788d439b480cc3b265d9c17c2e7f54be Mon Sep 17 00:00:00 2001 From: Geoff Jacobs Date: Mon, 21 Oct 2019 10:49:53 +1100 Subject: [PATCH 67/93] ITOPSENG-258 splitting to a new task to appease the linting overlords --- roles/crowd_config/tasks/main.yml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/roles/crowd_config/tasks/main.yml b/roles/crowd_config/tasks/main.yml index 9df29b9..f6e8493 100644 --- a/roles/crowd_config/tasks/main.yml +++ b/roles/crowd_config/tasks/main.yml @@ -25,7 +25,13 @@ lineinfile: path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh" insertafter: "EOF" - line: 'export CATALINA_OPTS="${CATALINA_OPTS} {{ atl_catalina_opts }} {{ atl_catalina_opts_extra }} -Dcluster.node.name={{ ansible_ec2_instance_id }}-{{ ansible_ec2_local_ipv4 }}"' + line: 'export CATALINA_OPTS="${CATALINA_OPTS} {{ atl_catalina_opts }} {{ atl_catalina_opts_extra }}"' + +- name: Set the Crowd node name via CATALINA_OPTS + lineinfile: + path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh" + insertafter: "EOF" + line: export CATALINA_OPTS="${CATALINA_OPTS} -Dcluster.node.name={{ ansible_ec2_instance_id }}-{{ ansible_ec2_local_ipv4 }}" - name: Set JAVA_HOME lineinfile: From 7e4d1301e10ad2c133341b0275913d85c7bf9f67 Mon Sep 17 00:00:00 2001 From: Varun Arbatti <1063972+theghostwhoforks@users.noreply.github.com> Date: Mon, 21 Oct 2019 11:08:28 +1100 Subject: [PATCH 68/93] DCD-686: Changes case of shared home from snakeCase to lower camel case --- roles/restore_backups/tasks/main.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 
deletions(-) diff --git a/roles/restore_backups/tasks/main.yml b/roles/restore_backups/tasks/main.yml index d12d073..18f2169 100644 --- a/roles/restore_backups/tasks/main.yml +++ b/roles/restore_backups/tasks/main.yml @@ -60,7 +60,7 @@ # following usages will need to be updated once it settles.. atl_backup_id: "{{ atl_backup_manifest.name }}" atl_backup_db_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.artifacts.db.location.value | basename }}" - atl_backup_home_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.artifacts.shared_home.location.value | basename }}" + atl_backup_home_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.artifacts.sharedHome.location.value | basename }}" # FIXME: Here we fetch the backups. However we may wish to stream # these directly from S3 to the target DB/FS to avoid requiring @@ -77,8 +77,8 @@ aws_s3: mode: get overwrite: different - bucket: "{{ atl_backup_manifest.artifacts.shared_home.location.value | urlsplit('hostname') }}" - object: "{{ atl_backup_manifest.artifacts.shared_home.location.value | urlsplit('path') }}" + bucket: "{{ atl_backup_manifest.artifacts.sharedHome.location.value | urlsplit('hostname') }}" + object: "{{ atl_backup_manifest.artifacts.sharedHome.location.value | urlsplit('path') }}" dest: "{{ atl_backup_home_dest }}" - name: Install distro-specific restore support packages From 2513450d07a15b30f44af06a54ab0c016dc8e964 Mon Sep 17 00:00:00 2001 From: Geoff Jacobs Date: Wed, 30 Oct 2019 10:49:39 +1100 Subject: [PATCH 69/93] ITOPSENG-283 updating the method for managing the xmx and xms settings in setenv.sh --- roles/crowd_config/tasks/main.yml | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/roles/crowd_config/tasks/main.yml b/roles/crowd_config/tasks/main.yml index f6e8493..ea31775 100644 --- a/roles/crowd_config/tasks/main.yml +++ b/roles/crowd_config/tasks/main.yml @@ -5,16 +5,19 @@ src: server.xml.j2 dest: "{{ atl_product_installation_versioned 
}}/apache-tomcat/conf/server.xml" -- name: Override JVM memory settings. - # Ugly but necessary as the product installs this file so we need to make the change here. +- name: Set the minimum heap size (Xms) lineinfile: path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh" - backrefs: true - regexp: "^{{ item }}=" - line: "{{ item }}=\"{{ atl_jvm_heap }}\"" - with_items: - - 'JVM_MINIMUM_MEMORY' - - 'JVM_MAXIMUM_MEMORY' + regexp: '^(.*)Xmx(\\d+\\w)(\\s.*)$' + line: '\1Xms${atl_jvm_heap}\3' + backrefs: yes + +- name: Set the maxmimum heap size (Xmx) + lineinfile: + path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh" + regexp: '^(.*)Xmx(\\d+\\w)(\\s.*)$' + line: '\1Xmx${xms}m\3' + backrefs: yes - name: Set Crowd home directory in crowd-init.properties file lineinfile: From 4c0cec450909f8c6382cb6689c166facb7c70504 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Wed, 30 Oct 2019 11:01:18 +1100 Subject: [PATCH 70/93] DCD-686: Check for no-op when no manifest URL specified. 
--- group_vars/aws_node_local.yml | 1 + roles/restore_backups/.yamllint | 12 +++++++ .../molecule/default/Dockerfile.j2 | 14 ++++++++ .../molecule/default/molecule.yml | 36 +++++++++++++++++++ .../molecule/default/playbook.yml | 10 ++++++ .../molecule/default/tests/test_default.py | 10 ++++++ roles/restore_backups/tasks/main.yml | 2 +- 7 files changed, 84 insertions(+), 1 deletion(-) create mode 100644 roles/restore_backups/.yamllint create mode 100644 roles/restore_backups/molecule/default/Dockerfile.j2 create mode 100644 roles/restore_backups/molecule/default/molecule.yml create mode 100644 roles/restore_backups/molecule/default/playbook.yml create mode 100644 roles/restore_backups/molecule/default/tests/test_default.py diff --git a/group_vars/aws_node_local.yml b/group_vars/aws_node_local.yml index 69285f9..1675ba8 100644 --- a/group_vars/aws_node_local.yml +++ b/group_vars/aws_node_local.yml @@ -128,3 +128,4 @@ atl_rds_subnet_group_name: "{{ lookup('env', 'ATL_RDS_SUBNET_GROUP_NAME') }}" atl_rds_security_group: "{{ lookup('env', 'ATL_RDS_SECURITY_GROUP') }}" atl_backup_manifest_url: "{{ lookup('env', 'ATL_BACKUP_MANIFEST_URL') }}" +atl_restore_required: "{{ atl_backup_manifest_url is defined and atl_backup_manifest_url != '' }}" diff --git a/roles/restore_backups/.yamllint b/roles/restore_backups/.yamllint new file mode 100644 index 0000000..a87f8ff --- /dev/null +++ b/roles/restore_backups/.yamllint @@ -0,0 +1,12 @@ +extends: default + +rules: + braces: + max-spaces-inside: 1 + level: error + brackets: + max-spaces-inside: 1 + level: error + line-length: disable + truthy: disable + trailing-spaces: false diff --git a/roles/restore_backups/molecule/default/Dockerfile.j2 b/roles/restore_backups/molecule/default/Dockerfile.j2 new file mode 100644 index 0000000..e6aa95d --- /dev/null +++ b/roles/restore_backups/molecule/default/Dockerfile.j2 @@ -0,0 +1,14 @@ +# Molecule managed + +{% if item.registry is defined %} +FROM {{ item.registry.url }}/{{ item.image }} +{% 
else %} +FROM {{ item.image }} +{% endif %} + +RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \ + elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \ + elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \ + elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \ + elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \ + elif [ $(command -v xbps-install) ]; then xbps-install -Syu && xbps-install -y python sudo bash ca-certificates && xbps-remove -O; fi diff --git a/roles/restore_backups/molecule/default/molecule.yml b/roles/restore_backups/molecule/default/molecule.yml new file mode 100644 index 0000000..7f082f6 --- /dev/null +++ b/roles/restore_backups/molecule/default/molecule.yml @@ -0,0 +1,36 @@ +--- +dependency: + name: galaxy +driver: + name: docker +lint: + name: yamllint +platforms: + - name: amazon_linux2 + image: amazonlinux:2 + groups: + - aws_node_local + ulimits: + - nofile:262144:262144 + - name: ubuntu_lts + image: ubuntu:bionic + groups: + - aws_node_local + ulimits: + - nofile:262144:262144 +provisioner: + name: ansible + options: + skip-tags: runtime_pkg + lint: + name: ansible-lint + options: + x: ["701"] + inventory: + links: + group_vars: ../../../../group_vars/ +verifier: + name: testinfra + lint: + name: flake8 + enabled: false diff --git a/roles/restore_backups/molecule/default/playbook.yml b/roles/restore_backups/molecule/default/playbook.yml new file mode 100644 index 0000000..ffd0c12 --- /dev/null +++ b/roles/restore_backups/molecule/default/playbook.yml @@ -0,0 +1,10 @@ +--- +- name: Converge + hosts: all + vars: + atl_backup_manifest_url: '' + 
atl_backup_home_restore_canary_path: '/tmp/canary.tmp' + + roles: + # Should be no-op + - role: restore_backups diff --git a/roles/restore_backups/molecule/default/tests/test_default.py b/roles/restore_backups/molecule/default/tests/test_default.py new file mode 100644 index 0000000..0a7276f --- /dev/null +++ b/roles/restore_backups/molecule/default/tests/test_default.py @@ -0,0 +1,10 @@ +import os + +import testinfra.utils.ansible_runner + +testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( + os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all') + + +def test_no_canary_file(host): + assert not host.file('atl_backup_home_restore_canary_path').exists diff --git a/roles/restore_backups/tasks/main.yml b/roles/restore_backups/tasks/main.yml index 18f2169..8767b4d 100644 --- a/roles/restore_backups/tasks/main.yml +++ b/roles/restore_backups/tasks/main.yml @@ -133,4 +133,4 @@ when: not restore_canary.stat.exists - when: atl_backup_manifest_url is defined and atl_backup_manifest_url != '' + when: atl_restore_required From e440daa1a5de40f9dae6d0fd6e10d4c09e61f676 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Wed, 30 Oct 2019 11:03:19 +1100 Subject: [PATCH 71/93] DCD-686: Update pipeline tests. 
--- bitbucket-pipelines.yml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/bitbucket-pipelines.yml b/bitbucket-pipelines.yml index ec993c6..d28b140 100644 --- a/bitbucket-pipelines.yml +++ b/bitbucket-pipelines.yml @@ -14,7 +14,7 @@ pipelines: - step: name: Pre Parallelization stage script: - - echo "Running tests in 28 batches" + - echo "Running tests in 29 batches" - step: name: Check if number of batches match actual number of scenarios script: @@ -251,4 +251,12 @@ pipelines: - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 28 + - step: + name: Molecule Test Batch - 29 + services: + - docker + script: + - apt-get update && ./bin/install-ansible --dev + - ./bin/run-tests-in-batches --batch 29 + From 5032371fed22d753cb2f3e9936be533bf26ec85f Mon Sep 17 00:00:00 2001 From: Geoff Jacobs Date: Wed, 30 Oct 2019 11:14:39 +1100 Subject: [PATCH 72/93] ITOPSENG-283 fixing typo and adding debug --- roles/crowd_config/tasks/main.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/roles/crowd_config/tasks/main.yml b/roles/crowd_config/tasks/main.yml index ea31775..3b5afdc 100644 --- a/roles/crowd_config/tasks/main.yml +++ b/roles/crowd_config/tasks/main.yml @@ -11,13 +11,15 @@ regexp: '^(.*)Xmx(\\d+\\w)(\\s.*)$' line: '\1Xms${atl_jvm_heap}\3' backrefs: yes + verbosity: 4 - name: Set the maxmimum heap size (Xmx) lineinfile: path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh" regexp: '^(.*)Xmx(\\d+\\w)(\\s.*)$' - line: '\1Xmx${xms}m\3' + line: '\1Xmx${atl_jvm_heap}\3' backrefs: yes + verbosity: 4 - name: Set Crowd home directory in crowd-init.properties file lineinfile: From 7d90e8b65a91dee94a4a5dfea90561f7c4a1cd01 Mon Sep 17 00:00:00 2001 From: Geoff Jacobs Date: Wed, 30 Oct 2019 11:21:34 +1100 Subject: [PATCH 73/93] ITOPSENG-283 more debug --- bin/ansible-with-atl-env | 2 +- roles/crowd_config/tasks/main.yml | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff 
--git a/bin/ansible-with-atl-env b/bin/ansible-with-atl-env index 3685381..b3e05e0 100755 --- a/bin/ansible-with-atl-env +++ b/bin/ansible-with-atl-env @@ -18,7 +18,7 @@ set +a # Use Ansible from virtualenv if provided pipenv run \ - ansible-playbook -v \ + ansible-playbook -vvvv \ $ATL_DEPLOYMENT_REPOSITORY_CUSTOM_PARAMS \ -i $INV \ $PLAYBOOK \ diff --git a/roles/crowd_config/tasks/main.yml b/roles/crowd_config/tasks/main.yml index 3b5afdc..817ad2e 100644 --- a/roles/crowd_config/tasks/main.yml +++ b/roles/crowd_config/tasks/main.yml @@ -11,7 +11,6 @@ regexp: '^(.*)Xmx(\\d+\\w)(\\s.*)$' line: '\1Xms${atl_jvm_heap}\3' backrefs: yes - verbosity: 4 - name: Set the maxmimum heap size (Xmx) lineinfile: @@ -19,7 +18,6 @@ regexp: '^(.*)Xmx(\\d+\\w)(\\s.*)$' line: '\1Xmx${atl_jvm_heap}\3' backrefs: yes - verbosity: 4 - name: Set Crowd home directory in crowd-init.properties file lineinfile: From ee759efeadefd9a0d7cebb9656660770f23613a0 Mon Sep 17 00:00:00 2001 From: Geoff Jacobs Date: Wed, 30 Oct 2019 11:45:57 +1100 Subject: [PATCH 74/93] ITOPSENG-283 upstream ansible example had wrong syntax :facepalm: --- bin/ansible-with-atl-env | 2 +- roles/crowd_config/tasks/main.yml | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/bin/ansible-with-atl-env b/bin/ansible-with-atl-env index b3e05e0..3685381 100755 --- a/bin/ansible-with-atl-env +++ b/bin/ansible-with-atl-env @@ -18,7 +18,7 @@ set +a # Use Ansible from virtualenv if provided pipenv run \ - ansible-playbook -vvvv \ + ansible-playbook -v \ $ATL_DEPLOYMENT_REPOSITORY_CUSTOM_PARAMS \ -i $INV \ $PLAYBOOK \ diff --git a/roles/crowd_config/tasks/main.yml b/roles/crowd_config/tasks/main.yml index 817ad2e..b5b987c 100644 --- a/roles/crowd_config/tasks/main.yml +++ b/roles/crowd_config/tasks/main.yml @@ -8,15 +8,15 @@ - name: Set the minimum heap size (Xms) lineinfile: path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh" - regexp: '^(.*)Xmx(\\d+\\w)(\\s.*)$' - line: 
'\1Xms${atl_jvm_heap}\3' + regexp: '^(.*)Xms(\d+\w)(\s.*)$' + line: '\1Xms{{ atl_jvm_heap }}\3' backrefs: yes - name: Set the maxmimum heap size (Xmx) lineinfile: path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh" - regexp: '^(.*)Xmx(\\d+\\w)(\\s.*)$' - line: '\1Xmx${atl_jvm_heap}\3' + regexp: '^(.*)Xmx(\d+\w)(\s.*)$' + line: '\1Xmx{{ atl_jvm_heap }}\3' backrefs: yes - name: Set Crowd home directory in crowd-init.properties file From 1ea30531b6806ae4753cbb1c20efdcecd314a987 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Thu, 31 Oct 2019 10:30:17 +1100 Subject: [PATCH 75/93] DCD-686: Remove HTTP manifest download for now. --- roles/restore_backups/tasks/main.yml | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/roles/restore_backups/tasks/main.yml b/roles/restore_backups/tasks/main.yml index 8767b4d..4d6865f 100644 --- a/roles/restore_backups/tasks/main.yml +++ b/roles/restore_backups/tasks/main.yml @@ -1,8 +1,8 @@ --- # This role will attempt to fetch and load the backup manifest from a -# remote HTTP or S3 URL. On successful completion the contents of JSON -# or YAML document will be in the var `atl_backup_manifest`. +# remote S3 URL. On successful completion the contents of JSON or YAML +# document will be in the var `atl_backup_manifest`. # # PREREQUISITES: # * `atl_backup_manifest_url` points at the manifest. @@ -12,6 +12,9 @@ # # NOTE: The actual DB/FS restore operations could potentially be split # out into discrete roles, but currently that is not required. +# +# TODO: Support HTTPS with authentication. Deferred until after the +# initial testing release. 
- block: @@ -43,12 +46,6 @@ dest: "{{ atl_backup_manifest_dest }}" when: atl_backup_manifest_url.scheme == 's3' - - name: Fetch the manifest from remote host - get_url: - url: "{{ atl_backup_manifest_url }}" - dest: "{{ atl_backup_manifest_dest }}" - when: atl_backup_manifest_url.scheme != 's3' - - name: Load parameters from manifest include_vars: file: "{{ atl_backup_manifest_dest }}" From 2d2fa39cbe31ba6a09f21537391baf4247d74557 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Thu, 31 Oct 2019 16:28:05 +1100 Subject: [PATCH 76/93] ITOPSENG-164 Testing change to molecule tests --- .../molecule/bitbucket_latest/tests/test_default.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/product_install/molecule/bitbucket_latest/tests/test_default.py b/roles/product_install/molecule/bitbucket_latest/tests/test_default.py index 55c71c6..4229b1f 100644 --- a/roles/product_install/molecule/bitbucket_latest/tests/test_default.py +++ b/roles/product_install/molecule/bitbucket_latest/tests/test_default.py @@ -37,6 +37,6 @@ def test_latest_is_downloaded(host): upstream_json = json.load(upstream_fd) upstream = upstream_json['version'] - installer = host.file('/opt/atlassian/tmp/bitbucket.' + upstream + '-x64.bin') + installer = host.file('/media/atl/bitbucket/shared/downloads/bitbucket.' + upstream + '-x64.bin') assert installer.exists assert installer.user == 'root' From 6e99ec440fe1eb4e30a9a37c17abb128ffcf3378 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Thu, 31 Oct 2019 16:40:26 +1100 Subject: [PATCH 77/93] Revert "ITOPSENG-164 Testing change to molecule tests" This reverts commit 2d2fa39cbe31ba6a09f21537391baf4247d74557. 
--- .../molecule/bitbucket_latest/tests/test_default.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/product_install/molecule/bitbucket_latest/tests/test_default.py b/roles/product_install/molecule/bitbucket_latest/tests/test_default.py index 4229b1f..55c71c6 100644 --- a/roles/product_install/molecule/bitbucket_latest/tests/test_default.py +++ b/roles/product_install/molecule/bitbucket_latest/tests/test_default.py @@ -37,6 +37,6 @@ def test_latest_is_downloaded(host): upstream_json = json.load(upstream_fd) upstream = upstream_json['version'] - installer = host.file('/media/atl/bitbucket/shared/downloads/bitbucket.' + upstream + '-x64.bin') + installer = host.file('/opt/atlassian/tmp/bitbucket.' + upstream + '-x64.bin') assert installer.exists assert installer.user == 'root' From 36f870f20096567dab33c37cfe300dcadd6f8dda Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Fri, 1 Nov 2019 10:57:06 +1100 Subject: [PATCH 78/93] ITOPSENG-164 Testing changes --- .../molecule/bitbucket_latest/tests/test_default.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/roles/product_install/molecule/bitbucket_latest/tests/test_default.py b/roles/product_install/molecule/bitbucket_latest/tests/test_default.py index 55c71c6..ed87338 100644 --- a/roles/product_install/molecule/bitbucket_latest/tests/test_default.py +++ b/roles/product_install/molecule/bitbucket_latest/tests/test_default.py @@ -37,6 +37,6 @@ def test_latest_is_downloaded(host): upstream_json = json.load(upstream_fd) upstream = upstream_json['version'] - installer = host.file('/opt/atlassian/tmp/bitbucket.' + upstream + '-x64.bin') + installer = host.file('/media/atl/bitbucket/shared/downloads/bitbucket.' 
+ upstream + '-x64.bin') assert installer.exists - assert installer.user == 'root' + assert installer.user == 'root' From c015984acec4d9aa5ad9ae49e595f7e1d4041453 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Fri, 1 Nov 2019 11:32:38 +1100 Subject: [PATCH 79/93] ITOPSENG-164 Testing changes --- .../molecule/confluence_latest/tests/test_default.py | 2 +- roles/product_install/molecule/default/tests/test_default.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/roles/product_install/molecule/confluence_latest/tests/test_default.py b/roles/product_install/molecule/confluence_latest/tests/test_default.py index 47245a4..0b6c684 100644 --- a/roles/product_install/molecule/confluence_latest/tests/test_default.py +++ b/roles/product_install/molecule/confluence_latest/tests/test_default.py @@ -35,6 +35,6 @@ def test_latest_is_downloaded(host): upstream_json = json.load(upstream_fd) upstream = upstream_json['version'] - installer = host.file('/opt/atlassian/tmp/confluence.'+upstream+'-x64.bin') + installer = host.file('/media/atl/confluence/shared-home/downloads/confluence.'+upstream+'-x64.bin') assert installer.exists assert installer.user == 'root' diff --git a/roles/product_install/molecule/default/tests/test_default.py b/roles/product_install/molecule/default/tests/test_default.py index 70839b9..08626e3 100644 --- a/roles/product_install/molecule/default/tests/test_default.py +++ b/roles/product_install/molecule/default/tests/test_default.py @@ -23,6 +23,6 @@ def test_latest_is_downloaded(host): upstream_json = json.load(upstream_fd) upstream = upstream_json['version'] - installer = host.file('/opt/atlassian/tmp/jira-core.'+upstream+'-x64.bin') + installer = host.file('/media/atl/jira/shared/downloads/jira-core.'+upstream+'-x64.bin') assert installer.exists assert installer.user == 'root' From 788bbd93dda7173131f30cd08169faecd9184ef5 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Fri, 1 Nov 2019 11:58:31 +1100 Subject: [PATCH 80/93] 
ITOPSENG-164 Testing changes --- .../molecule/jira_version_latest/tests/test_default.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/product_install/molecule/jira_version_latest/tests/test_default.py b/roles/product_install/molecule/jira_version_latest/tests/test_default.py index 70839b9..08626e3 100644 --- a/roles/product_install/molecule/jira_version_latest/tests/test_default.py +++ b/roles/product_install/molecule/jira_version_latest/tests/test_default.py @@ -23,6 +23,6 @@ def test_latest_is_downloaded(host): upstream_json = json.load(upstream_fd) upstream = upstream_json['version'] - installer = host.file('/opt/atlassian/tmp/jira-core.'+upstream+'-x64.bin') + installer = host.file('/media/atl/jira/shared/downloads/jira-core.'+upstream+'-x64.bin') assert installer.exists assert installer.user == 'root' From 8022b591573967fcb1e2773757f8d1d466504210 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Fri, 1 Nov 2019 12:06:08 +1100 Subject: [PATCH 81/93] ITOPS-2158 testing --- .../product_install/molecule/jira_tarball/tests/test_default.py | 2 +- .../molecule/jira_version_from_file/tests/test_default.py | 2 +- .../molecule/jira_version_override/tests/test_default.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/roles/product_install/molecule/jira_tarball/tests/test_default.py b/roles/product_install/molecule/jira_tarball/tests/test_default.py index 2f5d09b..d534aa5 100644 --- a/roles/product_install/molecule/jira_tarball/tests/test_default.py +++ b/roles/product_install/molecule/jira_tarball/tests/test_default.py @@ -23,6 +23,6 @@ def test_latest_is_downloaded(host): upstream_json = json.load(upstream_fd) upstream = upstream_json['version'] - installer = host.file('/opt/atlassian/tmp/jira-core.'+upstream+'.tar.gz') + installer = host.file('/media/atl/jira/shared/downloads/jira-core.'+upstream+'.tar.gz') assert installer.exists assert installer.user == 'root' diff --git 
a/roles/product_install/molecule/jira_version_from_file/tests/test_default.py b/roles/product_install/molecule/jira_version_from_file/tests/test_default.py index 5f00577..902b5f5 100644 --- a/roles/product_install/molecule/jira_version_from_file/tests/test_default.py +++ b/roles/product_install/molecule/jira_version_from_file/tests/test_default.py @@ -14,7 +14,7 @@ def test_version_is_correct(host): assert verfile.content.decode("UTF-8").strip() == "7.9.0" def test_is_downloaded(host): - installer = host.file('/opt/atlassian/tmp/jira-core.7.9.0-x64.bin') + installer = host.file('/media/atl/jira/shared/downloads/jira-core.7.9.0-x64.bin') assert installer.exists assert installer.user == 'root' diff --git a/roles/product_install/molecule/jira_version_override/tests/test_default.py b/roles/product_install/molecule/jira_version_override/tests/test_default.py index 8b5c7a4..c2e28c5 100644 --- a/roles/product_install/molecule/jira_version_override/tests/test_default.py +++ b/roles/product_install/molecule/jira_version_override/tests/test_default.py @@ -14,7 +14,7 @@ def test_version_is_correct(host): assert verfile.content.decode("UTF-8").strip() == "7.13.2" def test_is_downloaded(host): - installer = host.file('/opt/atlassian/tmp/jira-core.7.13.2-x64.bin') + installer = host.file('/media/atl/jira/shared/downloads/jira-core.7.13.2-x64.bin') assert installer.exists assert installer.user == 'root' From 17366378de4895b2dd6aa28ea16fdf95cc63b799 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Fri, 1 Nov 2019 12:22:50 +1100 Subject: [PATCH 82/93] ITOPS-2158 testing --- .../molecule/jira_cached_with_downgrade/tests/test_default.py | 2 +- .../molecule/jira_cached_with_upgrade/tests/test_default.py | 2 +- .../molecule/jira_software_latest/tests/test_default.py | 2 +- .../product_install/molecule/servicedesk3/tests/test_default.py | 2 +- .../product_install/molecule/servicedesk4/tests/test_default.py | 2 +- .../molecule/servicedesk_latest/tests/test_default.py | 2 +- 6 files 
changed, 6 insertions(+), 6 deletions(-) diff --git a/roles/product_install/molecule/jira_cached_with_downgrade/tests/test_default.py b/roles/product_install/molecule/jira_cached_with_downgrade/tests/test_default.py index 05a6bb3..aad72bf 100644 --- a/roles/product_install/molecule/jira_cached_with_downgrade/tests/test_default.py +++ b/roles/product_install/molecule/jira_cached_with_downgrade/tests/test_default.py @@ -14,7 +14,7 @@ def test_version_is_correct(host): assert verfile.content.decode("UTF-8").strip() == "7.10.2" def test_is_downloaded(host): - installer = host.file('/opt/atlassian/tmp/jira-core.7.10.2-x64.bin') + installer = host.file('/media/atl/jira/shared/downloads/jira-core.7.10.2-x64.bin') assert installer.exists assert installer.user == 'root' diff --git a/roles/product_install/molecule/jira_cached_with_upgrade/tests/test_default.py b/roles/product_install/molecule/jira_cached_with_upgrade/tests/test_default.py index ead6adf..c828961 100644 --- a/roles/product_install/molecule/jira_cached_with_upgrade/tests/test_default.py +++ b/roles/product_install/molecule/jira_cached_with_upgrade/tests/test_default.py @@ -14,7 +14,7 @@ def test_version_is_correct(host): assert verfile.content.decode("UTF-8").strip() == "7.10.1" def test_is_downloaded(host): - installer = host.file('/opt/atlassian/tmp/jira-core.7.10.1-x64.bin') + installer = host.file('/media/atl/jira/shared/downloads/jira-core.7.10.1-x64.bin') assert installer.exists assert installer.user == 'root' diff --git a/roles/product_install/molecule/jira_software_latest/tests/test_default.py b/roles/product_install/molecule/jira_software_latest/tests/test_default.py index 63451c8..9a5b161 100644 --- a/roles/product_install/molecule/jira_software_latest/tests/test_default.py +++ b/roles/product_install/molecule/jira_software_latest/tests/test_default.py @@ -35,6 +35,6 @@ def test_latest_is_downloaded(host): upstream_json = json.load(upstream_fd) upstream = upstream_json['version'] - installer = 
host.file('/opt/atlassian/tmp/jira-software.'+upstream+'-x64.bin') + installer = host.file('/media/atl/jira/shared/downloads/jira-software.'+upstream+'-x64.bin') assert installer.exists assert installer.user == 'root' diff --git a/roles/product_install/molecule/servicedesk3/tests/test_default.py b/roles/product_install/molecule/servicedesk3/tests/test_default.py index 24afb62..ed19b29 100644 --- a/roles/product_install/molecule/servicedesk3/tests/test_default.py +++ b/roles/product_install/molecule/servicedesk3/tests/test_default.py @@ -14,7 +14,7 @@ def test_version_is_correct(host): assert verfile.content.decode("UTF-8").strip() == "3.9.0" def test_is_downloaded(host): - installer = host.file('/opt/atlassian/tmp/servicedesk.3.9.0-x64.bin') + installer = host.file('/media/atl/jira/shared/downloads/servicedesk.3.9.0-x64.bin') assert installer.exists assert installer.user == 'root' diff --git a/roles/product_install/molecule/servicedesk4/tests/test_default.py b/roles/product_install/molecule/servicedesk4/tests/test_default.py index b660f23..e7d1179 100644 --- a/roles/product_install/molecule/servicedesk4/tests/test_default.py +++ b/roles/product_install/molecule/servicedesk4/tests/test_default.py @@ -14,7 +14,7 @@ def test_version_is_correct(host): assert verfile.content.decode("UTF-8").strip() == "4.1.0" def test_is_downloaded(host): - installer = host.file('/opt/atlassian/tmp/servicedesk.4.1.0-x64.bin') + installer = host.file('/media/atl/jira/shared/downloads/servicedesk.4.1.0-x64.bin') assert installer.exists assert installer.user == 'root' diff --git a/roles/product_install/molecule/servicedesk_latest/tests/test_default.py b/roles/product_install/molecule/servicedesk_latest/tests/test_default.py index 2190295..ee855e3 100644 --- a/roles/product_install/molecule/servicedesk_latest/tests/test_default.py +++ b/roles/product_install/molecule/servicedesk_latest/tests/test_default.py @@ -23,7 +23,7 @@ def test_version_is_correct(host): assert 
verfile.content.decode("UTF-8").strip() == sd def test_is_downloaded(host): - installer = host.file('/opt/atlassian/tmp/servicedesk.'+sd+'-x64.bin') + installer = host.file('/media/atl/jira/shared/downloads/servicedesk.'+sd+'-x64.bin') assert installer.exists assert installer.user == 'root' From e55593bf6c50171a8618bbe89b0f77905a2860a9 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Fri, 1 Nov 2019 13:35:30 +1100 Subject: [PATCH 83/93] ITOPS-2158 testing --- .../molecule/bitbucket_latest/tests/test_default.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/roles/product_install/molecule/bitbucket_latest/tests/test_default.py b/roles/product_install/molecule/bitbucket_latest/tests/test_default.py index ed87338..6c28b1b 100644 --- a/roles/product_install/molecule/bitbucket_latest/tests/test_default.py +++ b/roles/product_install/molecule/bitbucket_latest/tests/test_default.py @@ -39,4 +39,8 @@ def test_latest_is_downloaded(host): installer = host.file('/media/atl/bitbucket/shared/downloads/bitbucket.' + upstream + '-x64.bin') assert installer.exists - assert installer.user == 'root' + assert installer.user == 'root' + +def test_completed_lockfile(host): + verfile = host.file('/media/atl/bitbucket/shared/downloads/bitbucket.' 
+ upstream + '-x64.bin_completed') + assert verfile.exists From ae673495276cdd1c7068458e305c352e212a7fc0 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Fri, 1 Nov 2019 13:51:16 +1100 Subject: [PATCH 84/93] ITOPS-2158 testing --- .../molecule/bitbucket_latest/tests/test_default.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/roles/product_install/molecule/bitbucket_latest/tests/test_default.py b/roles/product_install/molecule/bitbucket_latest/tests/test_default.py index 6c28b1b..c709022 100644 --- a/roles/product_install/molecule/bitbucket_latest/tests/test_default.py +++ b/roles/product_install/molecule/bitbucket_latest/tests/test_default.py @@ -42,5 +42,10 @@ def test_latest_is_downloaded(host): assert installer.user == 'root' def test_completed_lockfile(host): + upstream_fd = urllib.request.urlopen( + "https://marketplace.atlassian.com/rest/2/applications/bitbucket/versions/latest") + upstream_json = json.load(upstream_fd) + upstream = upstream_json['version'] + verfile = host.file('/media/atl/bitbucket/shared/downloads/bitbucket.' 
+ upstream + '-x64.bin_completed') assert verfile.exists From c10b312148f373fff987d48858dbeb3925e7643c Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Fri, 1 Nov 2019 14:52:57 +1100 Subject: [PATCH 85/93] ITOPS-2158 Remaining molecule unit tests --- .../molecule/bitbucket_latest/tests/test_default.py | 7 ++++--- .../molecule/confluence_latest/tests/test_default.py | 9 +++++++++ .../molecule/default/tests/test_default.py | 9 +++++++++ .../jira_cached_with_downgrade/tests/test_default.py | 5 +++++ .../jira_cached_with_upgrade/tests/test_default.py | 5 +++++ .../molecule/jira_software_latest/tests/test_default.py | 9 +++++++++ .../molecule/jira_tarball/tests/test_default.py | 9 +++++++++ .../jira_version_from_file/tests/test_default.py | 5 +++++ .../molecule/jira_version_latest/tests/test_default.py | 9 +++++++++ .../molecule/jira_version_override/tests/test_default.py | 5 +++++ .../molecule/servicedesk3/tests/test_default.py | 5 +++++ .../molecule/servicedesk4/tests/test_default.py | 5 +++++ .../molecule/servicedesk_latest/tests/test_default.py | 5 +++++ 13 files changed, 84 insertions(+), 3 deletions(-) diff --git a/roles/product_install/molecule/bitbucket_latest/tests/test_default.py b/roles/product_install/molecule/bitbucket_latest/tests/test_default.py index c709022..bbd851d 100644 --- a/roles/product_install/molecule/bitbucket_latest/tests/test_default.py +++ b/roles/product_install/molecule/bitbucket_latest/tests/test_default.py @@ -46,6 +46,7 @@ def test_completed_lockfile(host): "https://marketplace.atlassian.com/rest/2/applications/bitbucket/versions/latest") upstream_json = json.load(upstream_fd) upstream = upstream_json['version'] - - verfile = host.file('/media/atl/bitbucket/shared/downloads/bitbucket.' + upstream + '-x64.bin_completed') - assert verfile.exists + + lockfile = host.file('/media/atl/bitbucket/shared/downloads/bitbucket.' 
+ upstream + '-x64.bin_completed') + assert lockfile.exists + assert lockfile.user == 'root' diff --git a/roles/product_install/molecule/confluence_latest/tests/test_default.py b/roles/product_install/molecule/confluence_latest/tests/test_default.py index 0b6c684..7ec072b 100644 --- a/roles/product_install/molecule/confluence_latest/tests/test_default.py +++ b/roles/product_install/molecule/confluence_latest/tests/test_default.py @@ -38,3 +38,12 @@ def test_latest_is_downloaded(host): installer = host.file('/media/atl/confluence/shared-home/downloads/confluence.'+upstream+'-x64.bin') assert installer.exists assert installer.user == 'root' + +def test_completed_lockfile(host): + upstream_fd = urllib.request.urlopen("https://marketplace.atlassian.com/rest/2/applications/confluence/versions/latest") + upstream_json = json.load(upstream_fd) + upstream = upstream_json['version'] + + lockfile = host.file('/media/atl/confluence/shared-home/downloads/confluence.'+upstream+'-x64.bin_completed') + assert lockfile.exists + assert lockfile.user == 'root' diff --git a/roles/product_install/molecule/default/tests/test_default.py b/roles/product_install/molecule/default/tests/test_default.py index 08626e3..930ab59 100644 --- a/roles/product_install/molecule/default/tests/test_default.py +++ b/roles/product_install/molecule/default/tests/test_default.py @@ -26,3 +26,12 @@ def test_latest_is_downloaded(host): installer = host.file('/media/atl/jira/shared/downloads/jira-core.'+upstream+'-x64.bin') assert installer.exists assert installer.user == 'root' + +def test_completed_lockfile(host): + upstream_fd = urllib.request.urlopen("https://marketplace.atlassian.com/rest/2/applications/jira/versions/latest") + upstream_json = json.load(upstream_fd) + upstream = upstream_json['version'] + + lockfile = host.file('/media/atl/jira/shared/downloads/jira-core.'+upstream+'-x64.bin_completed') + assert lockfile.exists + assert lockfile.user == 'root' \ No newline at end of file diff --git 
a/roles/product_install/molecule/jira_cached_with_downgrade/tests/test_default.py b/roles/product_install/molecule/jira_cached_with_downgrade/tests/test_default.py index aad72bf..788c3de 100644 --- a/roles/product_install/molecule/jira_cached_with_downgrade/tests/test_default.py +++ b/roles/product_install/molecule/jira_cached_with_downgrade/tests/test_default.py @@ -18,6 +18,11 @@ def test_is_downloaded(host): assert installer.exists assert installer.user == 'root' +def test_completed_lockfile(host): + lockfile = host.file('/media/atl/jira/shared/downloads/jira-core.7.10.2-x64.bin_completed') + assert lockfile.exists + assert lockfile.user == 'root' + def test_is_unpacked(host): installer = host.file('/opt/atlassian/jira-core/7.10.2/atlassian-jira/') assert installer.exists diff --git a/roles/product_install/molecule/jira_cached_with_upgrade/tests/test_default.py b/roles/product_install/molecule/jira_cached_with_upgrade/tests/test_default.py index c828961..0818e1b 100644 --- a/roles/product_install/molecule/jira_cached_with_upgrade/tests/test_default.py +++ b/roles/product_install/molecule/jira_cached_with_upgrade/tests/test_default.py @@ -18,6 +18,11 @@ def test_is_downloaded(host): assert installer.exists assert installer.user == 'root' +def test_completed_lockfile(host): + lockfile = host.file('/media/atl/jira/shared/downloads/jira-core.7.10.1-x64.bin_completed') + assert lockfile.exists + assert lockfile.user == 'root' + def test_is_unpacked(host): installer = host.file('/opt/atlassian/jira-core/7.10.1/atlassian-jira/') assert installer.exists diff --git a/roles/product_install/molecule/jira_software_latest/tests/test_default.py b/roles/product_install/molecule/jira_software_latest/tests/test_default.py index 9a5b161..de1dca3 100644 --- a/roles/product_install/molecule/jira_software_latest/tests/test_default.py +++ b/roles/product_install/molecule/jira_software_latest/tests/test_default.py @@ -38,3 +38,12 @@ def test_latest_is_downloaded(host): installer = 
host.file('/media/atl/jira/shared/downloads/jira-software.'+upstream+'-x64.bin') assert installer.exists assert installer.user == 'root' + +def test_completed_lockfile(host): + upstream_fd = urllib.request.urlopen("https://marketplace.atlassian.com/rest/2/applications/jira/versions/latest") + upstream_json = json.load(upstream_fd) + upstream = upstream_json['version'] + + lockfile = host.file('/media/atl/jira/shared/downloads/jira-software.'+upstream+'-x64.bin_completed') + assert lockfile.exists + assert lockfile.user == 'root' \ No newline at end of file diff --git a/roles/product_install/molecule/jira_tarball/tests/test_default.py b/roles/product_install/molecule/jira_tarball/tests/test_default.py index d534aa5..11a7438 100644 --- a/roles/product_install/molecule/jira_tarball/tests/test_default.py +++ b/roles/product_install/molecule/jira_tarball/tests/test_default.py @@ -26,3 +26,12 @@ def test_latest_is_downloaded(host): installer = host.file('/media/atl/jira/shared/downloads/jira-core.'+upstream+'.tar.gz') assert installer.exists assert installer.user == 'root' + +def test_completed_lockfile(host): + upstream_fd = urllib.request.urlopen("https://marketplace.atlassian.com/rest/2/applications/jira/versions/latest") + upstream_json = json.load(upstream_fd) + upstream = upstream_json['version'] + + lockfile = host.file('/media/atl/jira/shared/downloads/jira-core.'+upstream+'.tar.gz_completed') + assert lockfile.exists + assert lockfile.user == 'root' \ No newline at end of file diff --git a/roles/product_install/molecule/jira_version_from_file/tests/test_default.py b/roles/product_install/molecule/jira_version_from_file/tests/test_default.py index 902b5f5..b8a1966 100644 --- a/roles/product_install/molecule/jira_version_from_file/tests/test_default.py +++ b/roles/product_install/molecule/jira_version_from_file/tests/test_default.py @@ -18,6 +18,11 @@ def test_is_downloaded(host): assert installer.exists assert installer.user == 'root' +def 
test_completed_lockfile(host): + lockfile = host.file('/media/atl/jira/shared/downloads/jira-core.7.9.0-x64.bin_completed') + assert lockfile.exists + assert lockfile.user == 'root' + def test_is_unpacked(host): installer = host.file('/opt/atlassian/jira-core/7.9.0/atlassian-jira/') assert installer.exists diff --git a/roles/product_install/molecule/jira_version_latest/tests/test_default.py b/roles/product_install/molecule/jira_version_latest/tests/test_default.py index 08626e3..930ab59 100644 --- a/roles/product_install/molecule/jira_version_latest/tests/test_default.py +++ b/roles/product_install/molecule/jira_version_latest/tests/test_default.py @@ -26,3 +26,12 @@ def test_latest_is_downloaded(host): installer = host.file('/media/atl/jira/shared/downloads/jira-core.'+upstream+'-x64.bin') assert installer.exists assert installer.user == 'root' + +def test_completed_lockfile(host): + upstream_fd = urllib.request.urlopen("https://marketplace.atlassian.com/rest/2/applications/jira/versions/latest") + upstream_json = json.load(upstream_fd) + upstream = upstream_json['version'] + + lockfile = host.file('/media/atl/jira/shared/downloads/jira-core.'+upstream+'-x64.bin_completed') + assert lockfile.exists + assert lockfile.user == 'root' \ No newline at end of file diff --git a/roles/product_install/molecule/jira_version_override/tests/test_default.py b/roles/product_install/molecule/jira_version_override/tests/test_default.py index c2e28c5..3f16801 100644 --- a/roles/product_install/molecule/jira_version_override/tests/test_default.py +++ b/roles/product_install/molecule/jira_version_override/tests/test_default.py @@ -18,6 +18,11 @@ def test_is_downloaded(host): assert installer.exists assert installer.user == 'root' +def test_completed_lockfile(host): + lockfile = host.file('/media/atl/jira/shared/downloads/jira-core.7.13.2-x64.bin_completed') + assert lockfile.exists + assert lockfile.user == 'root' + def test_is_unpacked(host): installer = 
host.file('/opt/atlassian/jira-core/7.13.2') assert installer.exists diff --git a/roles/product_install/molecule/servicedesk3/tests/test_default.py b/roles/product_install/molecule/servicedesk3/tests/test_default.py index ed19b29..5f50b6e 100644 --- a/roles/product_install/molecule/servicedesk3/tests/test_default.py +++ b/roles/product_install/molecule/servicedesk3/tests/test_default.py @@ -18,6 +18,11 @@ def test_is_downloaded(host): assert installer.exists assert installer.user == 'root' +def test_completed_lockfile(host): + lockfile = host.file('/media/atl/jira/shared/downloads/servicedesk.3.9.0-x64.bin_completed') + assert lockfile.exists + assert lockfile.user == 'root' + def test_is_unpacked(host): installer = host.file('/opt/atlassian/jira-servicedesk/3.9.0') assert installer.exists diff --git a/roles/product_install/molecule/servicedesk4/tests/test_default.py b/roles/product_install/molecule/servicedesk4/tests/test_default.py index e7d1179..5a22e8c 100644 --- a/roles/product_install/molecule/servicedesk4/tests/test_default.py +++ b/roles/product_install/molecule/servicedesk4/tests/test_default.py @@ -18,6 +18,11 @@ def test_is_downloaded(host): assert installer.exists assert installer.user == 'root' +def test_completed_lockfile(host): + lockfile = host.file('/media/atl/jira/shared/downloads/servicedesk.4.1.0-x64.bin_completed') + assert lockfile.exists + assert lockfile.user == 'root' + def test_is_unpacked(host): installer = host.file('/opt/atlassian/jira-servicedesk/4.1.0') assert installer.exists diff --git a/roles/product_install/molecule/servicedesk_latest/tests/test_default.py b/roles/product_install/molecule/servicedesk_latest/tests/test_default.py index ee855e3..cd975f3 100644 --- a/roles/product_install/molecule/servicedesk_latest/tests/test_default.py +++ b/roles/product_install/molecule/servicedesk_latest/tests/test_default.py @@ -27,6 +27,11 @@ def test_is_downloaded(host): assert installer.exists assert installer.user == 'root' +def 
test_completed_lockfile(host): + lockfile = host.file('/media/atl/jira/shared/downloads/servicedesk.'+sd+'-x64.bin_completed') + assert lockfile.exists + assert lockfile.user == 'root' + def test_is_unpacked(host): installer = host.file('/opt/atlassian/jira-servicedesk/'+sd) assert installer.exists From d7ef24eaa94ffdf08f1f055fc0d2699e0ee360b0 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Fri, 1 Nov 2019 15:27:17 +1100 Subject: [PATCH 86/93] ITOPS-2158 Added latest crowd molecule test --- .../molecule/crowd_latest/Dockerfile.j2 | 14 ++++++ .../molecule/crowd_latest/molecule.yml | 30 ++++++++++++ .../molecule/crowd_latest/playbook.yml | 11 +++++ .../crowd_latest/tests/test_default.py | 49 +++++++++++++++++++ 4 files changed, 104 insertions(+) create mode 100644 roles/product_install/molecule/crowd_latest/Dockerfile.j2 create mode 100644 roles/product_install/molecule/crowd_latest/molecule.yml create mode 100644 roles/product_install/molecule/crowd_latest/playbook.yml create mode 100644 roles/product_install/molecule/crowd_latest/tests/test_default.py diff --git a/roles/product_install/molecule/crowd_latest/Dockerfile.j2 b/roles/product_install/molecule/crowd_latest/Dockerfile.j2 new file mode 100644 index 0000000..e6aa95d --- /dev/null +++ b/roles/product_install/molecule/crowd_latest/Dockerfile.j2 @@ -0,0 +1,14 @@ +# Molecule managed + +{% if item.registry is defined %} +FROM {{ item.registry.url }}/{{ item.image }} +{% else %} +FROM {{ item.image }} +{% endif %} + +RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \ + elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \ + elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \ + elif [ $(command -v zypper) ]; then zypper refresh && zypper 
install -y python sudo bash python-xml && zypper clean -a; \ + elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \ + elif [ $(command -v xbps-install) ]; then xbps-install -Syu && xbps-install -y python sudo bash ca-certificates && xbps-remove -O; fi diff --git a/roles/product_install/molecule/crowd_latest/molecule.yml b/roles/product_install/molecule/crowd_latest/molecule.yml new file mode 100644 index 0000000..7fd3163 --- /dev/null +++ b/roles/product_install/molecule/crowd_latest/molecule.yml @@ -0,0 +1,30 @@ +--- +dependency: + name: galaxy +driver: + name: docker +lint: + name: yamllint +platforms: + - name: amazon_linux2 + image: amazonlinux:2 + groups: + - aws_node_local + - name: ubuntu_lts + image: ubuntu:bionic + groups: + - aws_node_local +provisioner: + name: ansible + options: + skip-tags: runtime_pkg + lint: + name: ansible-lint + inventory: + links: + group_vars: ../../../../group_vars/ +verifier: + name: testinfra + lint: + name: flake8 + enabled: false diff --git a/roles/product_install/molecule/crowd_latest/playbook.yml b/roles/product_install/molecule/crowd_latest/playbook.yml new file mode 100644 index 0000000..3373f8a --- /dev/null +++ b/roles/product_install/molecule/crowd_latest/playbook.yml @@ -0,0 +1,11 @@ +--- +- name: Converge + hosts: all + vars: + atl_product_family: "crowd" + atl_product_edition: "crowd" + atl_product_user: "crowd" + roles: + - role: linux_common + - role: product_common + - role: product_install diff --git a/roles/product_install/molecule/crowd_latest/tests/test_default.py b/roles/product_install/molecule/crowd_latest/tests/test_default.py new file mode 100644 index 0000000..64c8b58 --- /dev/null +++ b/roles/product_install/molecule/crowd_latest/tests/test_default.py @@ -0,0 +1,49 @@ +import os +from six.moves import urllib +import json + +import testinfra.utils.ansible_runner + +testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( + 
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all') + +def test_version_downloaded(host): + verfile = host.file('/media/atl/crowd/shared/crowd.version') + assert verfile.exists + +def test_symlink_created(host): + target = host.file('/opt/atlassian/crowd/current') + assert target.exists + assert target.is_symlink + +def test_unpacked(host): + verfile = host.file('/opt/atlassian/crowd/current/bin/catalina.sh') + assert verfile.exists + +def test_version_file_is_latest(host): + verfile = host.file('/media/atl/crowd/shared/crowd.version') + assert verfile.exists + + upstream_fd = urllib.request.urlopen("https://marketplace.atlassian.com/rest/2/applications/crowd/versions/latest") + upstream_json = json.load(upstream_fd) + upstream = upstream_json['version'] + + assert verfile.content.decode("UTF-8").strip() == upstream.strip() + +def test_latest_is_downloaded(host): + upstream_fd = urllib.request.urlopen("https://marketplace.atlassian.com/rest/2/applications/crowd/versions/latest") + upstream_json = json.load(upstream_fd) + upstream = upstream_json['version'] + + installer = host.file('/media/atl/crowd/shared/downloads/crowd.'+upstream+'-x64.bin') + assert installer.exists + assert installer.user == 'root' + +def test_completed_lockfile(host): + upstream_fd = urllib.request.urlopen("https://marketplace.atlassian.com/rest/2/applications/crowd/versions/latest") + upstream_json = json.load(upstream_fd) + upstream = upstream_json['version'] + + lockfile = host.file('/media/atl/crowd/shared/downloads/crowd.'+upstream+'-x64.bin_completed') + assert lockfile.exists + assert lockfile.user == 'root' From 2b774cd8e67c4ccb94fbe6282da38ef0eea28c96 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Fri, 1 Nov 2019 15:35:41 +1100 Subject: [PATCH 87/93] ITOPS-2158 Added latest crowd molecule test --- bitbucket-pipelines.yml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/bitbucket-pipelines.yml b/bitbucket-pipelines.yml index d28b140..11d18bd 100644 --- 
a/bitbucket-pipelines.yml +++ b/bitbucket-pipelines.yml @@ -259,4 +259,10 @@ pipelines: - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 29 - + - step: + name: Molecule Test Batch - 30 + services: + - docker + script: + - apt-get update && ./bin/install-ansible --dev + - ./bin/run-tests-in-batches --batch 30 From 3032a001be895e8ff84a5283048cdb65d86f0a76 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Fri, 1 Nov 2019 15:42:56 +1100 Subject: [PATCH 88/93] ITOPS-2158 Added latest crowd molecule test --- bitbucket-pipelines.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bitbucket-pipelines.yml b/bitbucket-pipelines.yml index 11d18bd..34b84dd 100644 --- a/bitbucket-pipelines.yml +++ b/bitbucket-pipelines.yml @@ -14,7 +14,7 @@ pipelines: - step: name: Pre Parallelization stage script: - - echo "Running tests in 29 batches" + - echo "Running tests in 30 batches" - step: name: Check if number of batches match actual number of scenarios script: @@ -266,3 +266,4 @@ pipelines: script: - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 30 + From cadae8248f949f6b08ff31c0fec5f38af09446dc Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Fri, 1 Nov 2019 16:19:51 +1100 Subject: [PATCH 89/93] ITOPS-2158 Changed run_user test to be more specific --- bitbucket-pipelines.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/bitbucket-pipelines.yml b/bitbucket-pipelines.yml index 34b84dd..408b078 100644 --- a/bitbucket-pipelines.yml +++ b/bitbucket-pipelines.yml @@ -267,3 +267,4 @@ pipelines: - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 30 + From 531fb18294afbf702cfac3b441e06434a32288bb Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Mon, 4 Nov 2019 10:39:59 +1100 Subject: [PATCH 90/93] ITOPS-2158 Changed run_user test to be more specific --- .../molecule/crowd_latest/tests/test_default.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 
deletions(-) diff --git a/roles/product_install/molecule/crowd_latest/tests/test_default.py b/roles/product_install/molecule/crowd_latest/tests/test_default.py index 64c8b58..51ec463 100644 --- a/roles/product_install/molecule/crowd_latest/tests/test_default.py +++ b/roles/product_install/molecule/crowd_latest/tests/test_default.py @@ -17,33 +17,36 @@ def test_symlink_created(host): assert target.is_symlink def test_unpacked(host): - verfile = host.file('/opt/atlassian/crowd/current/bin/catalina.sh') + verfile = host.file('/opt/atlassian/crowd/current/bin/start-crowd.sh') assert verfile.exists def test_version_file_is_latest(host): verfile = host.file('/media/atl/crowd/shared/crowd.version') assert verfile.exists - upstream_fd = urllib.request.urlopen("https://marketplace.atlassian.com/rest/2/applications/crowd/versions/latest") + upstream_fd = urllib.request.urlopen( + "https://marketplace.atlassian.com/rest/2/applications/crowd/versions/latest") upstream_json = json.load(upstream_fd) upstream = upstream_json['version'] assert verfile.content.decode("UTF-8").strip() == upstream.strip() def test_latest_is_downloaded(host): - upstream_fd = urllib.request.urlopen("https://marketplace.atlassian.com/rest/2/applications/crowd/versions/latest") + upstream_fd = urllib.request.urlopen( + "https://marketplace.atlassian.com/rest/2/applications/crowd/versions/latest") upstream_json = json.load(upstream_fd) upstream = upstream_json['version'] - installer = host.file('/media/atl/crowd/shared/downloads/crowd.'+upstream+'-x64.bin') + installer = host.file('/media/atl/crowd/shared/downloads/crowd.' 
+ upstream + '-x64.bin') assert installer.exists assert installer.user == 'root' def test_completed_lockfile(host): - upstream_fd = urllib.request.urlopen("https://marketplace.atlassian.com/rest/2/applications/crowd/versions/latest") + upstream_fd = urllib.request.urlopen( + "https://marketplace.atlassian.com/rest/2/applications/crowd/versions/latest") upstream_json = json.load(upstream_fd) upstream = upstream_json['version'] - lockfile = host.file('/media/atl/crowd/shared/downloads/crowd.'+upstream+'-x64.bin_completed') + lockfile = host.file('/media/atl/crowd/shared/downloads/crowd.' + upstream + '-x64.bin_completed') assert lockfile.exists assert lockfile.user == 'root' From 952c8ac97e55830554c494d7561074b9145b6eb8 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Mon, 4 Nov 2019 11:03:56 +1100 Subject: [PATCH 91/93] ITOPS-2158 Added crowd test - testing --- .../product_install/molecule/crowd_latest/tests/test_default.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/product_install/molecule/crowd_latest/tests/test_default.py b/roles/product_install/molecule/crowd_latest/tests/test_default.py index 51ec463..eb186dc 100644 --- a/roles/product_install/molecule/crowd_latest/tests/test_default.py +++ b/roles/product_install/molecule/crowd_latest/tests/test_default.py @@ -17,7 +17,7 @@ def test_symlink_created(host): assert target.is_symlink def test_unpacked(host): - verfile = host.file('/opt/atlassian/crowd/current/bin/start-crowd.sh') + verfile = host.file('/opt/atlassian/crowd/current/start_crowd.sh') assert verfile.exists def test_version_file_is_latest(host): From 0b129bce1d2a60044c4279bbc047699692559ff3 Mon Sep 17 00:00:00 2001 From: Steve Smith Date: Mon, 4 Nov 2019 11:14:11 +1100 Subject: [PATCH 92/93] DCD-431: Fix incorrect service start-up target and make configurable. 
--- roles/product_startup/defaults/main.yml | 3 ++- roles/product_startup/templates/product.service.j2 | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/roles/product_startup/defaults/main.yml b/roles/product_startup/defaults/main.yml index ca1eda5..45d6f8a 100644 --- a/roles/product_startup/defaults/main.yml +++ b/roles/product_startup/defaults/main.yml @@ -14,5 +14,6 @@ atl_startup_exec_path: "{{ atl_product_installation_current }}/{{ atl_startup_sc atl_startup_exec_options: ["-fg"] atl_startup_systemd_params: [] - atl_systemd_service_name: "{{ atl_product_edition }}.service" + +atl_systemd_service_target: "multi-user.target" diff --git a/roles/product_startup/templates/product.service.j2 b/roles/product_startup/templates/product.service.j2 index 6b5077f..8310e88 100644 --- a/roles/product_startup/templates/product.service.j2 +++ b/roles/product_startup/templates/product.service.j2 @@ -15,4 +15,4 @@ ExecStart={{ atl_startup_exec_path }}{% for c in atl_startup_exec_options %} {{ Restart=on-failure [Install] -WantedBy=multi-target.target +WantedBy={{ atl_systemd_service_target }} From acfd10d2ae475347868ac6008052097aecbdf124 Mon Sep 17 00:00:00 2001 From: Glenn Stewart Date: Mon, 4 Nov 2019 11:39:08 +1100 Subject: [PATCH 93/93] ITOPS-2158 Added crowd test - testing --- roles/product_install/molecule/crowd_latest/playbook.yml | 1 + .../molecule/crowd_latest/tests/test_default.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/roles/product_install/molecule/crowd_latest/playbook.yml b/roles/product_install/molecule/crowd_latest/playbook.yml index 3373f8a..490514e 100644 --- a/roles/product_install/molecule/crowd_latest/playbook.yml +++ b/roles/product_install/molecule/crowd_latest/playbook.yml @@ -5,6 +5,7 @@ atl_product_family: "crowd" atl_product_edition: "crowd" atl_product_user: "crowd" + atl_download_format: "tarball" roles: - role: linux_common - role: product_common diff --git 
a/roles/product_install/molecule/crowd_latest/tests/test_default.py b/roles/product_install/molecule/crowd_latest/tests/test_default.py index eb186dc..b75a0b5 100644 --- a/roles/product_install/molecule/crowd_latest/tests/test_default.py +++ b/roles/product_install/molecule/crowd_latest/tests/test_default.py @@ -37,7 +37,7 @@ def test_latest_is_downloaded(host): upstream_json = json.load(upstream_fd) upstream = upstream_json['version'] - installer = host.file('/media/atl/crowd/shared/downloads/crowd.' + upstream + '-x64.bin') + installer = host.file('/media/atl/crowd/shared/downloads/crowd.' + upstream + '.tar.gz') assert installer.exists assert installer.user == 'root' @@ -47,6 +47,6 @@ def test_completed_lockfile(host): upstream_json = json.load(upstream_fd) upstream = upstream_json['version'] - lockfile = host.file('/media/atl/crowd/shared/downloads/crowd.' + upstream + '-x64.bin_completed') + lockfile = host.file('/media/atl/crowd/shared/downloads/crowd.' + upstream + '.tar.gz_completed') assert lockfile.exists assert lockfile.user == 'root'