Mirror of https://bitbucket.org/atlassian/dc-deployments-automation.git
DCD-436: Add flag to disable CloudWatch completely.
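For context, a minimal sketch (not part of this commit) of how the new flag is meant to be consumed: `atl_aws_enable_cw` defaults to `true` and can be switched off per deployment, either through the `ATL_AWS_ENABLE_CW` environment variable read by the group vars below or by overriding the variable directly, e.g.:

# Hypothetical override (extra vars or host/group vars); the variable names
# match the ones introduced in this commit, everything else is an assumption.
atl_aws_enable_cw: false        # skip CloudWatch agent install, config and handlers
atl_aws_enable_cw_logs: false   # log shipping stays off as well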
@@ -56,7 +56,8 @@ atl_aws_region: "{{ lookup('env', 'ATL_AWS_REGION') }}"
 atl_aws_iam_role: "{{ lookup('env', 'ATL_AWS_IAM_ROLE') }}"
 atl_aws_iam_role_arn: "{{ lookup('env', 'ATL_AWS_IAM_ROLE_ARN') }}"

-atl_aws_enable_cw_logs: "{{ lookup('env', 'ATL_AWS_ENABLE_CW_LOGS') or false }}"
+atl_aws_enable_cw: "{{ lookup('env', 'ATL_AWS_ENABLE_CW')|bool or false }}"
+atl_aws_enable_cw_logs: "{{ lookup('env', 'ATL_AWS_ENABLE_CW_LOGS')|bool or false }}"

 atl_db_engine: "{{ lookup('env', 'ATL_DB_ENGINE') }}"
 atl_db_host: "{{ lookup('env', 'ATL_DB_HOST') }}"
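Worth noting why the `|bool` cast was added to the lookups above: `lookup('env', ...)` returns a string, and any non-empty string is truthy in Jinja, so an environment value of "false" would otherwise still enable the feature. An illustrative check (not part of this commit):

# Illustrative only: string truthiness vs. an explicit bool cast.
- debug:
    msg: "{{ 'false' or false }}"         # -> "false" (non-empty string, truthy)
- debug:
    msg: "{{ 'false' | bool or false }}"  # -> False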
@@ -4,6 +4,9 @@

 aws_download_region: "{{ ansible_ec2_placement_region | default('us-west-2') }}"
 aws_cloudwatch_agent_rpm: "https://s3.{{ aws_download_region }}.amazonaws.com/amazoncloudwatch-agent-{{ aws_download_region }}/amazon_linux/amd64/latest/amazon-cloudwatch-agent.rpm"

+atl_aws_enable_cw: true
+atl_aws_enable_cw_logs: false
+
 # Mostly for molecule testing, as skip-tags doesn't work with handlers.
 atl_aws_agent_restart: true
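The handlers themselves are not touched in this diff; presumably they look roughly like the sketch below, with `atl_aws_agent_restart` acting as the molecule-friendly switch described in the comment above. Only the handler name (a notify target seen later in this commit) comes from the diff; the module, service name and guard are assumptions.

# Hypothetical handler, sketched for illustration only.
- name: Restart CloudWatch Agent
  systemd:
    name: amazon-cloudwatch-agent   # assumed service unit name
    state: restarted
  when: atl_aws_agent_restart | bool  # assumed guard, per the comment above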
roles/aws_common/molecule/cw-disabled/Dockerfile.j2 (new file, 14 lines)
@@ -0,0 +1,14 @@
+# Molecule managed
+
+{% if item.registry is defined %}
+FROM {{ item.registry.url }}/{{ item.image }}
+{% else %}
+FROM {{ item.image }}
+{% endif %}
+
+RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
+    elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
+    elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
+    elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
+    elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \
+    elif [ $(command -v xbps-install) ]; then xbps-install -Syu && xbps-install -y python sudo bash ca-certificates && xbps-remove -O; fi
roles/aws_common/molecule/cw-disabled/molecule.yml (new file, 26 lines)
@@ -0,0 +1,26 @@
+---
+dependency:
+  name: galaxy
+driver:
+  name: docker
+lint:
+  name: yamllint
+platforms:
+  - name: amazon_linux2
+    image: amazonlinux:2
+    groups:
+      - aws_node_local
+  # - name: ubuntu_lts
+  #   image: ubuntu:bionic
+provisioner:
+  name: ansible
+  lint:
+    name: ansible-lint
+  inventory:
+    links:
+      group_vars: ../../../../group_vars/
+verifier:
+  name: testinfra
+  lint:
+    name: flake8
+    enabled: false
roles/aws_common/molecule/cw-disabled/playbook.yml (new file, 17 lines)
@@ -0,0 +1,17 @@
+---
+- name: Converge
+  hosts: all
+  vars:
+    ansible_ec2_local_ipv4: "1.1.1.1"
+    ansible_default_ipv4:
+      address: "9.9.9.9"
+    ansible_ec2_instance_id: "NONE"
+
+    atl_product_family: "jira"
+    atl_product_edition: "jira-software"
+    atl_aws_stack_name: "MY_STACK"
+
+    atl_aws_enable_cw: "{{ 'false'|bool }}"
+
+  roles:
+    - role: aws_common
roles/aws_common/molecule/cw-disabled/tests/test_default.py (new file, 23 lines)
@@ -0,0 +1,23 @@
+import os
+import pytest
+
+import testinfra.utils.ansible_runner
+
+testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
+    os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
+
+
+@pytest.mark.parametrize('exe', [
+    '/usr/bin/ec2-metadata',
+    '/usr/bin/amazon-ssm-agent',
+    '/sbin/mount.efs'
+])
+def test_package_exes(host, exe):
+    assert host.file(exe).exists
+
+@pytest.mark.parametrize('path', [
+    '/opt/aws/amazon-cloudwatch-agent/bin/amazon-cloudwatch-agent',
+    '/opt/aws/amazon-cloudwatch-agent/etc/amazon-cloudwatch-agent.json'
+])
+def test_package_not_installed(host, path):
+    assert not host.file(path).exists
@@ -11,6 +11,8 @@
     atl_product_edition: "jira-software"
     atl_aws_stack_name: "MY_STACK"

+    # The `bool` pipe is a sanity check for group file.
+    atl_aws_enable_cw: "{{ 'true'|bool }}"
     atl_aws_enable_cw_logs: true
     atl_aws_agent_restart: false

@@ -11,7 +11,8 @@
     atl_product_edition: "jira-software"
     atl_aws_stack_name: "MY_STACK"

-    atl_aws_enable_cw_logs: "false"
+    atl_aws_enable_cw: true
+    atl_aws_enable_cw_logs: false

     atl_aws_agent_restart: false

@@ -11,5 +11,6 @@
   yum:
     name:
       - "{{ aws_cloudwatch_agent_rpm }}"
+  when: atl_aws_enable_cw is defined and atl_aws_enable_cw
   notify:
     - Enable CloudWatch Agent
@@ -20,5 +20,6 @@
     owner: root
     group: root
     mode: 0644
+  when: atl_aws_enable_cw is defined and atl_aws_enable_cw
  notify:
    - Restart CloudWatch Agent
@@ -4,7 +4,7 @@
     "run_as_user": "root"
   },

-{% if atl_aws_enable_cw_logs == true or atl_aws_enable_cw_logs == 'true' %}
+{% if atl_aws_enable_cw_logs is defined and atl_aws_enable_cw_logs %}
   "logs": {
     "logs_collected": {
       "files": {
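The rewritten guard drops the explicit string comparison and relies on plain truthiness, which is safe now that the group-vars lookups cast the value with `|bool`; an undefined variable also falls through to the metrics-only config. A small illustrative check (not from the commit) of how the new condition evaluates:

# Illustrative only: behaviour of the new template guard for typical values,
# assuming the |bool cast applied upstream in group_vars.
- debug:
    msg: "logs section {{ 'rendered' if (atl_aws_enable_cw_logs is defined and atl_aws_enable_cw_logs) else 'omitted' }}"
  vars:
    atl_aws_enable_cw_logs: false   # -> omitted; true -> rendered; undefined -> omitted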