diff --git a/Pipfile b/Pipfile index 0baa144..e78f929 100644 --- a/Pipfile +++ b/Pipfile @@ -4,14 +4,16 @@ verify_ssl = true name = "pypi" [packages] -ansible = "==2.7.11" +ansible = "==2.8.5" +boto3 = "==1.9.242" +botocore = "==1.12.242" [dev-packages] -molecule = "==2.20.1" -docker = "==4.0.1" +molecule = "==2.20.2" +docker = "==4.1.0" six = "*" taskcat = "*" Jinja2 = "*" [requires] -python_version = "2.7" +python_version = "3.7" diff --git a/Pipfile.lock b/Pipfile.lock index 12753dd..725f9f6 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,11 +1,11 @@ { "_meta": { "hash": { - "sha256": "d19b07115cf0a0e5ea9ce4283e43ee9c0efa52683080b730b8bf943ed87861e8" + "sha256": "4cec168800858d0bce3beaae422011cb6458d548e9a8fc1807f39bd7c8eb24e6" }, "pipfile-spec": 6, "requires": { - "python_version": "2.7" + "python_version": "3.7" }, "sources": [ { @@ -18,41 +18,33 @@ "default": { "ansible": { "hashes": [ - "sha256:e7e6de461b7d07cb4d8b2dd2a32b231af7c56e6bf39b851024671aaa52fd377e" + "sha256:8e9403e755ce8ef27b6066cdd7a4c567aa80ebe2fd90d0ff8efa0a725d246986" ], "index": "pypi", - "version": "==2.7.11" + "version": "==2.8.5" }, "asn1crypto": { "hashes": [ - "sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87", - "sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49" + "sha256:d02bf8ea1b964a5ff04ac7891fe3a39150045d1e5e4fe99273ba677d11b92a04", + "sha256:f822954b90c4c44f002e2cd46d636ab630f1fe4df22c816a82b66505c404eb2a" ], - "version": "==0.24.0" + "version": "==1.0.0" }, - "bcrypt": { + "boto3": { "hashes": [ - "sha256:0ba875eb67b011add6d8c5b76afbd92166e98b1f1efab9433d5dc0fafc76e203", - "sha256:21ed446054c93e209434148ef0b362432bb82bbdaf7beef70a32c221f3e33d1c", - "sha256:28a0459381a8021f57230954b9e9a65bb5e3d569d2c253c5cac6cb181d71cf23", - "sha256:2aed3091eb6f51c26b7c2fad08d6620d1c35839e7a362f706015b41bd991125e", - "sha256:2fa5d1e438958ea90eaedbf8082c2ceb1a684b4f6c75a3800c6ec1e18ebef96f", - "sha256:3a73f45484e9874252002793518da060fb11eaa76c30713faa12115db17d1430", - "sha256:3e489787638a36bb466cd66780e15715494b6d6905ffdbaede94440d6d8e7dba", - "sha256:44636759d222baa62806bbceb20e96f75a015a6381690d1bc2eda91c01ec02ea", - "sha256:678c21b2fecaa72a1eded0cf12351b153615520637efcadc09ecf81b871f1596", - "sha256:75460c2c3786977ea9768d6c9d8957ba31b5fbeb0aae67a5c0e96aab4155f18c", - "sha256:8ac06fb3e6aacb0a95b56eba735c0b64df49651c6ceb1ad1cf01ba75070d567f", - "sha256:8fdced50a8b646fff8fa0e4b1c5fd940ecc844b43d1da5a980cb07f2d1b1132f", - "sha256:9b2c5b640a2da533b0ab5f148d87fb9989bf9bcb2e61eea6a729102a6d36aef9", - "sha256:a9083e7fa9adb1a4de5ac15f9097eb15b04e2c8f97618f1b881af40abce382e1", - "sha256:b7e3948b8b1a81c5a99d41da5fb2dc03ddb93b5f96fcd3fd27e643f91efa33e1", - "sha256:b998b8ca979d906085f6a5d84f7b5459e5e94a13fc27c28a3514437013b6c2f6", - "sha256:dd08c50bc6f7be69cd7ba0769acca28c846ec46b7a8ddc2acf4b9ac6f8a7457e", - "sha256:de5badee458544ab8125e63e39afeedfcf3aef6a6e2282ac159c95ae7472d773", - "sha256:ede2a87333d24f55a4a7338a6ccdccf3eaa9bed081d1737e0db4dbd1a4f7e6b6" + "sha256:4189e1ffed768bd0efd754a0abedebce19495ba2aa6b2f5e20f29ba80f81f9cb", + "sha256:fa4e28166922feeb9b7b56134c1acc817a1bca36284a0035bc08a3dab1853a9f" ], - "version": "==3.1.6" + "index": "pypi", + "version": "==1.9.242" + }, + "botocore": { + "hashes": [ + "sha256:7af52e0aabaf4ba045e1a5832308e70e1ea4b499b71624857f09aed2ba5e667c", + "sha256:dd62d63bcd3176c92775c52d3e879288f89bf0ac0039df14ea31f25d693acd6d" + ], + "index": "pypi", + "version": "==1.12.242" }, "cffi": { "hashes": [ @@ -108,23 +100,13 @@ ], "version": "==2.7" }, - 
"enum34": { + "docutils": { "hashes": [ - "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850", - "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a", - "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79", - "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1" + "sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0", + "sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827", + "sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99" ], - "markers": "python_version < '3'", - "version": "==1.1.6" - }, - "ipaddress": { - "hashes": [ - "sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794", - "sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c" - ], - "markers": "python_version < '3'", - "version": "==1.0.22" + "version": "==0.15.2" }, "jinja2": { "hashes": [ @@ -133,6 +115,13 @@ ], "version": "==2.10.1" }, + "jmespath": { + "hashes": [ + "sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6", + "sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c" + ], + "version": "==0.9.4" + }, "markupsafe": { "hashes": [ "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", @@ -166,58 +155,44 @@ ], "version": "==1.1.1" }, - "paramiko": { - "hashes": [ - "sha256:69c219df239775800a2589ee60159aa7cfd87175809b6557da7fb9dcb44ca430", - "sha256:9f081281064b5180dc0ef60e256224a280ff16f603a99f3dd4ba6334ebb65f7e" - ], - "version": "==2.5.0" - }, "pycparser": { "hashes": [ "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3" ], "version": "==2.19" }, - "pynacl": { + "python-dateutil": { "hashes": [ - "sha256:05c26f93964373fc0abe332676cb6735f0ecad27711035b9472751faa8521255", - "sha256:0c6100edd16fefd1557da078c7a31e7b7d7a52ce39fdca2bec29d4f7b6e7600c", - "sha256:0d0a8171a68edf51add1e73d2159c4bc19fc0718e79dec51166e940856c2f28e", - "sha256:1c780712b206317a746ace34c209b8c29dbfd841dfbc02aa27f2084dd3db77ae", - "sha256:2424c8b9f41aa65bbdbd7a64e73a7450ebb4aa9ddedc6a081e7afcc4c97f7621", - "sha256:2d23c04e8d709444220557ae48ed01f3f1086439f12dbf11976e849a4926db56", - "sha256:30f36a9c70450c7878053fa1344aca0145fd47d845270b43a7ee9192a051bf39", - "sha256:37aa336a317209f1bb099ad177fef0da45be36a2aa664507c5d72015f956c310", - "sha256:4943decfc5b905748f0756fdd99d4f9498d7064815c4cf3643820c9028b711d1", - "sha256:57ef38a65056e7800859e5ba9e6091053cd06e1038983016effaffe0efcd594a", - "sha256:5bd61e9b44c543016ce1f6aef48606280e45f892a928ca7068fba30021e9b786", - "sha256:6482d3017a0c0327a49dddc8bd1074cc730d45db2ccb09c3bac1f8f32d1eb61b", - "sha256:7d3ce02c0784b7cbcc771a2da6ea51f87e8716004512493a2b69016326301c3b", - "sha256:a14e499c0f5955dcc3991f785f3f8e2130ed504fa3a7f44009ff458ad6bdd17f", - "sha256:a39f54ccbcd2757d1d63b0ec00a00980c0b382c62865b61a505163943624ab20", - "sha256:aabb0c5232910a20eec8563503c153a8e78bbf5459490c49ab31f6adf3f3a415", - "sha256:bd4ecb473a96ad0f90c20acba4f0bf0df91a4e03a1f4dd6a4bdc9ca75aa3a715", - "sha256:e2da3c13307eac601f3de04887624939aca8ee3c9488a0bb0eca4fb9401fc6b1", - "sha256:f67814c38162f4deb31f68d590771a29d5ae3b1bd64b75cf232308e5c74777e0" + "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb", + "sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e" ], - "version": "==1.3.0" + "markers": "python_version >= '2.7'", + "version": "==2.8.0" }, "pyyaml": { "hashes": [ - 
"sha256:57acc1d8533cbe51f6662a55434f0dbecfa2b9eaf115bede8f6fd00115a0c0d3", - "sha256:588c94b3d16b76cfed8e0be54932e5729cc185caffaa5a451e7ad2f7ed8b4043", - "sha256:68c8dd247f29f9a0d09375c9c6b8fdc64b60810ebf07ba4cdd64ceee3a58c7b7", - "sha256:70d9818f1c9cd5c48bb87804f2efc8692f1023dac7f1a1a5c61d454043c1d265", - "sha256:86a93cccd50f8c125286e637328ff4eef108400dd7089b46a7be3445eecfa391", - "sha256:a0f329125a926876f647c9fa0ef32801587a12328b4a3c741270464e3e4fa778", - "sha256:a3c252ab0fa1bb0d5a3f6449a4826732f3eb6c0270925548cac342bc9b22c225", - "sha256:b4bb4d3f5e232425e25dda21c070ce05168a786ac9eda43768ab7f3ac2770955", - "sha256:cd0618c5ba5bda5f4039b9398bb7fb6a317bb8298218c3de25c47c4740e4b95e", - "sha256:ceacb9e5f8474dcf45b940578591c7f3d960e82f926c707788a570b51ba59190", - "sha256:fe6a88094b64132c4bb3b631412e90032e8cfe9745a58370462240b8cb7553cd" + "sha256:0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9", + "sha256:01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4", + "sha256:5124373960b0b3f4aa7df1707e63e9f109b5263eca5976c66e08b1c552d4eaf8", + "sha256:5ca4f10adbddae56d824b2c09668e91219bb178a1eee1faa56af6f99f11bf696", + "sha256:7907be34ffa3c5a32b60b95f4d95ea25361c951383a894fec31be7252b2b6f34", + "sha256:7ec9b2a4ed5cad025c2278a1e6a19c011c80a3caaac804fd2d329e9cc2c287c9", + "sha256:87ae4c829bb25b9fe99cf71fbb2140c448f534e24c998cc60f39ae4f94396a73", + "sha256:9de9919becc9cc2ff03637872a440195ac4241c80536632fffeb6a1e25a74299", + "sha256:a5a85b10e450c66b49f98846937e8cfca1db3127a9d5d1e31ca45c3d0bef4c5b", + "sha256:b0997827b4f6a7c286c01c5f60384d218dca4ed7d9efa945c3e1aa623d5709ae", + "sha256:b631ef96d3222e62861443cc89d6563ba3eeb816eeb96b2629345ab795e53681", + "sha256:bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41", + "sha256:f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8" ], - "version": "==5.1.1" + "version": "==5.1.2" + }, + "s3transfer": { + "hashes": [ + "sha256:6efc926738a3cd576c2a79725fed9afde92378aa5c6a957e3af010cb019fac9d", + "sha256:b780f2411b824cb541dbcd2c713d0cb61c7d1bcadae204cdddda2b35cef493ba" + ], + "version": "==0.2.1" }, "six": { "hashes": [ @@ -225,15 +200,23 @@ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" ], "version": "==1.12.0" + }, + "urllib3": { + "hashes": [ + "sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398", + "sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86" + ], + "markers": "python_version >= '3.4'", + "version": "==1.25.6" } }, "develop": { "ansible": { "hashes": [ - "sha256:e7e6de461b7d07cb4d8b2dd2a32b231af7c56e6bf39b851024671aaa52fd377e" + "sha256:8e9403e755ce8ef27b6066cdd7a4c567aa80ebe2fd90d0ff8efa0a725d246986" ], "index": "pypi", - "version": "==2.7.11" + "version": "==2.8.5" }, "ansible-lint": { "hashes": [ @@ -250,17 +233,17 @@ }, "arrow": { "hashes": [ - "sha256:03404b624e89ac5e4fc19c52045fa0f3203419fd4dd64f6e8958c522580a574a", - "sha256:41be7ea4c53c2cf57bf30f2d614f60c411160133f7a0a8c49111c30fb7e725b5" + "sha256:10257c5daba1a88db34afa284823382f4963feca7733b9107956bed041aff24f", + "sha256:c2325911fcd79972cf493cfd957072f9644af8ad25456201ae1ede3316576eb4" ], - "version": "==0.14.2" + "version": "==0.15.2" }, "asn1crypto": { "hashes": [ - "sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87", - "sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49" + "sha256:d02bf8ea1b964a5ff04ac7891fe3a39150045d1e5e4fe99273ba677d11b92a04", + "sha256:f822954b90c4c44f002e2cd46d636ab630f1fe4df22c816a82b66505c404eb2a" ], - 
"version": "==0.24.0" + "version": "==1.0.0" }, "atomicwrites": { "hashes": [ @@ -271,31 +254,16 @@ }, "attrs": { "hashes": [ - "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79", - "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399" + "sha256:ec20e7a4825331c1b5ebf261d111e16fa9612c1f7a5e1f884f12bd53a664dfd2", + "sha256:f913492e1663d3c36f502e5e9ba6cd13cf19d7fab50aa13239e420fef95e1396" ], - "version": "==19.1.0" + "version": "==19.2.0" }, "aws-sam-translator": { "hashes": [ - "sha256:db872c43bdfbbae9fc8c9201e6a7aeb9a661cda116a94708ab0577b46a38b962" + "sha256:6563aa3b534e7ad672d580ecd3dfa92021e81b4e5983604c0df7ee0a07b3ed99" ], - "version": "==1.11.0" - }, - "backports.functools-lru-cache": { - "hashes": [ - "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a", - "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd" - ], - "markers": "python_version == '2.7'", - "version": "==1.5" - }, - "backports.ssl-match-hostname": { - "hashes": [ - "sha256:bb82e60f9fbf4c080eabd957c39f0641f0fc247d9a16e31e26d594d8f42b9fd2" - ], - "markers": "python_version < '3.5'", - "version": "==3.7.0.1" + "version": "==1.15.0" }, "binaryornot": { "hashes": [ @@ -306,17 +274,19 @@ }, "boto3": { "hashes": [ - "sha256:794a9a4b6a9e40c1ac57a377de609872d28d62afe4295c48cdc1b1c92f96ab8e", - "sha256:962b078568cc520869ea2842f307864c9abc30ad5ed160e12b2a89debf220161" + "sha256:4189e1ffed768bd0efd754a0abedebce19495ba2aa6b2f5e20f29ba80f81f9cb", + "sha256:fa4e28166922feeb9b7b56134c1acc817a1bca36284a0035bc08a3dab1853a9f" ], - "version": "==1.9.168" + "index": "pypi", + "version": "==1.9.242" }, "botocore": { "hashes": [ - "sha256:675f2b66af486dd02f5825601bb0c8378773999f8705c6f75450849ca41fed80", - "sha256:c3fc314c0e0aa13aa024d272d991e23d37550050abf96b3c7dea889ed1743723" + "sha256:7af52e0aabaf4ba045e1a5832308e70e1ea4b499b71624857f09aed2ba5e667c", + "sha256:dd62d63bcd3176c92775c52d3e879288f89bf0ac0039df14ea31f25d693acd6d" ], - "version": "==1.12.168" + "index": "pypi", + "version": "==1.12.242" }, "cerberus": { "hashes": [ @@ -326,10 +296,10 @@ }, "certifi": { "hashes": [ - "sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5", - "sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae" + "sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50", + "sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef" ], - "version": "==2019.3.9" + "version": "==2019.9.11" }, "cffi": { "hashes": [ @@ -366,10 +336,10 @@ }, "cfn-lint": { "hashes": [ - "sha256:16500272b5e2a3e9eb94e6b42c0a652b1a084fa96f8c5efb07ff4adde3b448ec", - "sha256:ce4bf8c0e6d5b8ad3f1b4cd8261e1eca795d61fb3723e3dce85c78eff95ab120" + "sha256:32a3e1597c681c9411205bff48b421db60908c304c472f4644d5a32bc9ecdad3", + "sha256:623cf0f6ed4c7b3fb4563549e25ac68119478900d89ca976639f11c5d85063a6" ], - "version": "==0.21.5" + "version": "==0.24.3" }, "chardet": { "hashes": [ @@ -398,22 +368,6 @@ ], "version": "==0.3.9" }, - "configparser": { - "hashes": [ - "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32", - "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75" - ], - "markers": "python_version < '3.2'", - "version": "==3.7.4" - }, - "contextlib2": { - "hashes": [ - "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48", - "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00" - ], - "markers": "python_version < '3'", - "version": "==0.5.5" - }, "cookiecutter": { 
"hashes": [ "sha256:1316a52e1c1f08db0c9efbf7d876dbc01463a74b155a0d83e722be88beda9a3e", @@ -444,19 +398,19 @@ }, "docker": { "hashes": [ - "sha256:3db499d4d25847fed86acf8e100c989f7bc0f75a6fff6c52855726ada1d124f6", - "sha256:f61c37d721b489b7d55ef631b241be2d6a5884c3ffe63dc8f7dd9a3c3cd60489" + "sha256:6e06c5e70ba4fad73e35f00c55a895a448398f3ada7faae072e2bb01348bafc1", + "sha256:8f93775b8bdae3a2df6bc9a5312cce564cade58d6555f2c2570165a1270cd8a7" ], "index": "pypi", - "version": "==4.0.1" + "version": "==4.1.0" }, "docutils": { "hashes": [ - "sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6", - "sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274", - "sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6" + "sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0", + "sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827", + "sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99" ], - "version": "==0.14" + "version": "==0.15.2" }, "entrypoints": { "hashes": [ @@ -465,16 +419,6 @@ ], "version": "==0.3" }, - "enum34": { - "hashes": [ - "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850", - "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a", - "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79", - "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1" - ], - "markers": "python_version < '3'", - "version": "==1.1.6" - }, "fasteners": { "hashes": [ "sha256:007e4d2b2d4a10093f67e932e5166722d2eab83b77724156e92ad013c6226574", @@ -484,26 +428,10 @@ }, "flake8": { "hashes": [ - "sha256:859996073f341f2670741b51ec1e67a01da142831aa1fdc6242dbf88dffbe661", - "sha256:a796a115208f5c03b18f332f7c11729812c8c3ded6c46319c59b53efd3819da8" + "sha256:19241c1cbc971b9962473e4438a2ca19749a7dd002dd1a946eaba171b4114548", + "sha256:8e9dfa3cecb2400b3738a42c54c3043e821682b9c840b0448c0503f781130696" ], - "version": "==3.7.7" - }, - "funcsigs": { - "hashes": [ - "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", - "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" - ], - "markers": "python_version < '3.0'", - "version": "==1.0.2" - }, - "functools32": { - "hashes": [ - "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", - "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" - ], - "markers": "python_version == '2.7'", - "version": "==3.2.3.post2" + "version": "==3.7.8" }, "future": { "hashes": [ @@ -511,14 +439,6 @@ ], "version": "==0.17.1" }, - "futures": { - "hashes": [ - "sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265", - "sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1" - ], - "markers": "python_version == '2.6' or python_version == '2.7'", - "version": "==3.2.0" - }, "git-url-parse": { "hashes": [ "sha256:4655ee22f1d8bf7a1eb1066c1da16529b186966c6d8331f7f55686a76a9f7aef", @@ -536,18 +456,11 @@ }, "importlib-metadata": { "hashes": [ - "sha256:6dfd58dfe281e8d240937776065dd3624ad5469c835248219bd16cf2e12dbeb7", - "sha256:cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db" + "sha256:aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26", + "sha256:d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af" ], - "version": "==0.18" - }, - "ipaddress": { - "hashes": [ - "sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794", - 
"sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c" - ], - "markers": "python_version < '3'", - "version": "==1.0.22" + "markers": "python_version < '3.8'", + "version": "==0.23" }, "jinja2": { "hashes": [ @@ -572,10 +485,10 @@ }, "jsonpatch": { "hashes": [ - "sha256:49f29cab70e9068db3b1dc6b656cbe2ee4edf7dfe9bf5a0055f17a4b6804a4b9", - "sha256:8bf92fa26bc42c346c03bd4517722a8e4f429225dbe775ac774b2c70d95dbd33" + "sha256:83f29a2978c13da29bfdf89da9d65542d62576479caf215df19632d7dc04c6e6", + "sha256:cbb72f8bf35260628aea6b508a107245f757d1ec839a19c34349985e2c05645a" ], - "version": "==1.23" + "version": "==1.24" }, "jsonpointer": { "hashes": [ @@ -586,10 +499,10 @@ }, "jsonschema": { "hashes": [ - "sha256:000e68abd33c972a5248544925a0cae7d1125f9bf6c58280d37546b946769a08", - "sha256:6ff5f3180870836cae40f06fa10419f557208175f13ad7bc26caa77beb1f6e02" + "sha256:5f9c0a719ca2ce14c5de2fd350a64fd2d13e8539db29836a86adc990bb1a068f", + "sha256:8d4a2b7b6c2237e0199c8ea1a6d3e05bf118e289ae2b9d7ba444182a2959560d" ], - "version": "==2.6.0" + "version": "==3.0.2" }, "markupsafe": { "hashes": [ @@ -640,11 +553,11 @@ }, "molecule": { "hashes": [ - "sha256:0e9ef6845cdf2a01f6c386445e4e54add3f515a033ee16b7b658e6122c8f0d76", - "sha256:621797c54299775f284bbb010d5bb9be485500eecaaa14a476cbc0df285d0da7" + "sha256:5fa56e52602364716dd5aa55e1dd70400f2094b8cc3c458869e5382e84149065", + "sha256:9dc29b9ef172b26532752784687faca2e868c84e2d90f0b4f018d81d76a8b30a" ], "index": "pypi", - "version": "==2.20.1" + "version": "==2.20.2" }, "monotonic": { "hashes": [ @@ -655,32 +568,23 @@ }, "more-itertools": { "hashes": [ - "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4", - "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", - "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9" + "sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832", + "sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4" ], - "markers": "python_version <= '2.7'", - "version": "==5.0.0" + "version": "==7.2.0" }, "packaging": { "hashes": [ - "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af", - "sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3" + "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47", + "sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108" ], - "version": "==19.0" - }, - "pathlib2": { - "hashes": [ - "sha256:25199318e8cc3c25dcb45cbe084cc061051336d5a9ea2a12448d3d8cb748f742", - "sha256:5887121d7f7df3603bca2f710e7219f3eca0eb69e0b7cc6e0a022e155ac931a7" - ], - "version": "==2.3.3" + "version": "==19.2" }, "pathspec": { "hashes": [ - "sha256:54a5eab895d89f342b52ba2bffe70930ef9f8d96e398cccf530d21fa0516a873" + "sha256:e285ccc8b0785beadd4c18e5708b12bb8fcf529a1e61215b3feff1d1e559ea5c" ], - "version": "==0.5.9" + "version": "==0.6.0" }, "pbr": { "hashes": [ @@ -698,17 +602,17 @@ }, "pluggy": { "hashes": [ - "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc", - "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c" + "sha256:0db4b7601aae1d35b4a033282da476845aa19185c1e6964b25cf324b5e4ec3e6", + "sha256:fa5fa1622fa6dd5c030e9cad086fa19ef6a0cf6d7a2d12318e10cb49d6d68f34" ], - "version": "==0.12.0" + "version": "==0.13.0" }, "poyo": { "hashes": [ - "sha256:c34a5413191210ed564640510e9c4a4ba3b698746d6b454d46eb5bfb30edcd1d", - "sha256:d1c317054145a6b1ca0608b5e676b943ddc3bfd671f886a2fe09288b98221edb" + 
"sha256:3e2ca8e33fdc3c411cd101ca395668395dd5dc7ac775b8e809e3def9f9fe041a", + "sha256:e26956aa780c45f011ca9886f044590e2d8fd8b61db7b1c1cf4e0869f48ed4dd" ], - "version": "==0.4.2" + "version": "==0.5.0" }, "psutil": { "hashes": [ @@ -753,13 +657,6 @@ ], "version": "==2.19" }, - "pyfiglet": { - "hashes": [ - "sha256:c6c2321755d09267b438ec7b936825a4910fec696292139e664ca8670e103639", - "sha256:d555bcea17fbeaf70eaefa48bb119352487e629c9b56f30f383e2c62dd67a01c" - ], - "version": "==0.8.post1" - }, "pyflakes": { "hashes": [ "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0", @@ -769,17 +666,23 @@ }, "pyparsing": { "hashes": [ - "sha256:1873c03321fc118f4e9746baf201ff990ceb915f433f23b395f5580d1840cb2a", - "sha256:9b6323ef4ab914af344ba97510e966d64ba91055d6b9afa6b30799340e89cc03" + "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80", + "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4" ], - "version": "==2.4.0" + "version": "==2.4.2" + }, + "pyrsistent": { + "hashes": [ + "sha256:34b47fa169d6006b32e99d4b3c4031f155e6e68ebcc107d6454852e8e0ee6533" + ], + "version": "==0.15.4" }, "pytest": { "hashes": [ - "sha256:4a784f1d4f2ef198fe9b7aef793e9fa1a3b2f84e822d9b3a64a181293a572d45", - "sha256:926855726d8ae8371803f7b2e6ec0a69953d9c6311fa7c3b6c1b929ff92d27da" + "sha256:13c1c9b22127a77fc684eee24791efafcef343335d855e3573791c68588fe1a5", + "sha256:d8ba7be9466f55ef96ba203fc0f90d0cf212f2f927e69186e1353e30bc7f62e5" ], - "version": "==4.6.3" + "version": "==5.2.0" }, "python-dateutil": { "hashes": [ @@ -799,19 +702,21 @@ }, "pyyaml": { "hashes": [ - "sha256:57acc1d8533cbe51f6662a55434f0dbecfa2b9eaf115bede8f6fd00115a0c0d3", - "sha256:588c94b3d16b76cfed8e0be54932e5729cc185caffaa5a451e7ad2f7ed8b4043", - "sha256:68c8dd247f29f9a0d09375c9c6b8fdc64b60810ebf07ba4cdd64ceee3a58c7b7", - "sha256:70d9818f1c9cd5c48bb87804f2efc8692f1023dac7f1a1a5c61d454043c1d265", - "sha256:86a93cccd50f8c125286e637328ff4eef108400dd7089b46a7be3445eecfa391", - "sha256:a0f329125a926876f647c9fa0ef32801587a12328b4a3c741270464e3e4fa778", - "sha256:a3c252ab0fa1bb0d5a3f6449a4826732f3eb6c0270925548cac342bc9b22c225", - "sha256:b4bb4d3f5e232425e25dda21c070ce05168a786ac9eda43768ab7f3ac2770955", - "sha256:cd0618c5ba5bda5f4039b9398bb7fb6a317bb8298218c3de25c47c4740e4b95e", - "sha256:ceacb9e5f8474dcf45b940578591c7f3d960e82f926c707788a570b51ba59190", - "sha256:fe6a88094b64132c4bb3b631412e90032e8cfe9745a58370462240b8cb7553cd" + "sha256:0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9", + "sha256:01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4", + "sha256:5124373960b0b3f4aa7df1707e63e9f109b5263eca5976c66e08b1c552d4eaf8", + "sha256:5ca4f10adbddae56d824b2c09668e91219bb178a1eee1faa56af6f99f11bf696", + "sha256:7907be34ffa3c5a32b60b95f4d95ea25361c951383a894fec31be7252b2b6f34", + "sha256:7ec9b2a4ed5cad025c2278a1e6a19c011c80a3caaac804fd2d329e9cc2c287c9", + "sha256:87ae4c829bb25b9fe99cf71fbb2140c448f534e24c998cc60f39ae4f94396a73", + "sha256:9de9919becc9cc2ff03637872a440195ac4241c80536632fffeb6a1e25a74299", + "sha256:a5a85b10e450c66b49f98846937e8cfca1db3127a9d5d1e31ca45c3d0bef4c5b", + "sha256:b0997827b4f6a7c286c01c5f60384d218dca4ed7d9efa945c3e1aa623d5709ae", + "sha256:b631ef96d3222e62861443cc89d6563ba3eeb816eeb96b2629345ab795e53681", + "sha256:bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41", + "sha256:f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8" ], - "version": "==5.1.1" + "version": "==5.1.2" }, "requests": { "hashes": [ @@ -820,40 +725,36 
@@ ], "version": "==2.22.0" }, - "ruamel.ordereddict": { - "hashes": [ - "sha256:08b4b19fe518d32251a5338e039c4dc9eb0876f2919f94c9b8d2f9446ea80806", - "sha256:150ce8e6c514a2a2b62753622a75874962561f8e5eeec81a3172ab952807bf0b", - "sha256:45541836cbfdde630033cae7bbbe35acbac87a0ceec79f944b7a3bedd940fe78", - "sha256:854dd4a524811b16111b1107d8a751e4ca064d2bb103d3d91deab75de36b6620", - "sha256:aee2fa23e884249b4284b728888c553d551e5bfd4de2731f10153fd7813ec55f", - "sha256:bf0a198c8ce5d973c24e5dba12d3abc254996788ca6ad8448eabc6aa710db149" - ], - "markers": "platform_python_implementation == 'CPython' and python_version <= '2.7'", - "version": "==0.4.13" - }, "ruamel.yaml": { "hashes": [ - "sha256:17dbf6b7362e7aee8494f7a0f5cffd44902a6331fe89ef0853b855a7930ab845", - "sha256:23731c9efb79f3f5609dedffeb6c5c47a68125fd3d4b157d9fc71b1cd49076a9", - "sha256:2bbdd598ae57bac20968cf9028cc67d37d83bdb7942a94b9478110bc72193148", - "sha256:34586084cdd60845a3e1bece2b58f0a889be25450db8cc0ea143ddf0f40557a2", - "sha256:35957fedbb287b01313bb5c556ffdc70c0277c3500213b5e73dfd8716f748d77", - "sha256:414cb87a40974a575830b406ffab4ab8c6cbd82eeb73abd2a9d1397c1f0223e1", - "sha256:428775be75db68d908b17e4e8dda424c410222f170dc173246aa63e972d094b3", - "sha256:514f670f7d36519bda504d507edfe63e3c20489f86c86d42bc4d9a6dbdf82c7b", - "sha256:5cb962c1ac6887c5da29138fbbe3b4b7705372eb54e599907fa63d4cd743246d", - "sha256:5f6e30282cf70fb7754e1a5f101e27b5240009766376e131b31ab49f14fe81be", - "sha256:86f8e010af6af0b4f42de2d0d9b19cb441e61d3416082186f9dd03c8552d13ad", - "sha256:8d47ed1e557d546bd2dfe54f504d7274274602ff7a0652cde84c258ad6c2d96d", - "sha256:98668876720bce1ac08562d8b93a564a80e3397e442c7ea19cebdcdf73da7f74", - "sha256:9e1f0ddc18d8355dcf5586a5d90417df56074f237812b8682a93b62cca9d2043", - "sha256:a7bc812a72a79d6b7dbb96fa5bee3950464b65ec055d3abc4db6572f2373a95c", - "sha256:b72e13f9f206ee103247b07afd5a39c8b1aa98e8eba80ddba184d030337220ba", - "sha256:bcff8ea9d916789e85e24beed8830c157fb8bc7c313e554733a8151540e66c01", - "sha256:c76e78b3bab652069b8d6f7889b0e72f3455c2b854b2e0a8818393d149ad0a0d" + "sha256:0db639b1b2742dae666c6fc009b8d1931ef15c9276ef31c0673cc6dcf766cf40", + "sha256:412a6f5cfdc0525dee6a27c08f5415c7fd832a7afcb7a0ed7319628aed23d408" ], - "version": "==0.15.97" + "version": "==0.16.5" + }, + "ruamel.yaml.clib": { + "hashes": [ + "sha256:1e77424825caba5553bbade750cec2277ef130647d685c2b38f68bc03453bac6", + "sha256:392b7c371312abf27fb549ec2d5e0092f7ef6e6c9f767bfb13e83cb903aca0fd", + "sha256:4d55386129291b96483edcb93b381470f7cd69f97585829b048a3d758d31210a", + "sha256:550168c02d8de52ee58c3d8a8193d5a8a9491a5e7b2462d27ac5bf63717574c9", + "sha256:57933a6986a3036257ad7bf283529e7c19c2810ff24c86f4a0cfeb49d2099919", + "sha256:615b0396a7fad02d1f9a0dcf9f01202bf9caefee6265198f252c865f4227fcc6", + "sha256:77556a7aa190be9a2bd83b7ee075d3df5f3c5016d395613671487e79b082d784", + "sha256:7aee724e1ff424757b5bd8f6c5bbdb033a570b2b4683b17ace4dbe61a99a657b", + "sha256:8073c8b92b06b572e4057b583c3d01674ceaf32167801fe545a087d7a1e8bf52", + "sha256:9c6d040d0396c28d3eaaa6cb20152cb3b2f15adf35a0304f4f40a3cf9f1d2448", + "sha256:a0ff786d2a7dbe55f9544b3f6ebbcc495d7e730df92a08434604f6f470b899c5", + "sha256:b1b7fcee6aedcdc7e62c3a73f238b3d080c7ba6650cd808bce8d7761ec484070", + "sha256:b66832ea8077d9b3f6e311c4a53d06273db5dc2db6e8a908550f3c14d67e718c", + "sha256:d0d3ac228c9bbab08134b4004d748cf9f8743504875b3603b3afbb97e3472947", + "sha256:d10e9dd744cf85c219bf747c75194b624cc7a94f0c80ead624b06bfa9f61d3bc", + "sha256:ea4362548ee0cbc266949d8a441238d9ad3600ca9910c3fe4e82ee3a50706973", + 
"sha256:ed5b3698a2bb241b7f5cbbe277eaa7fe48b07a58784fba4f75224fd066d253ad", + "sha256:f9dcc1ae73f36e8059589b601e8e4776b9976effd76c21ad6a855a74318efd6e" + ], + "markers": "platform_python_implementation == 'CPython' and python_version < '3.8'", + "version": "==0.2.0" }, "s3transfer": { "hashes": [ @@ -862,23 +763,6 @@ ], "version": "==0.2.1" }, - "scandir": { - "hashes": [ - "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e", - "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022", - "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f", - "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f", - "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae", - "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173", - "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4", - "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32", - "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188", - "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d", - "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac" - ], - "markers": "python_version < '3.5'", - "version": "==1.10.0" - }, "sh": { "hashes": [ "sha256:ae3258c5249493cebe73cb4e18253a41ed69262484bad36fdb3efcb8ad8870bb", @@ -901,17 +785,17 @@ }, "taskcat": { "hashes": [ - "sha256:af4149d6b951cbc4974e5a03f8eb8c137e1f81ac98a348715eb6dc287f728b2a" + "sha256:e84eb198c74ca677b589889d4e6877568e25858235d51cdd99a8128d525b63b2" ], "index": "pypi", - "version": "==0.8.35" + "version": "==0.8.47" }, "testinfra": { "hashes": [ - "sha256:8dbbf25039674d419598f576c5652947cebdf7cbbea8f23acacc80271009c6cb", - "sha256:d13dda899d5a051465f041a821363e2ebdd079391fbeae04089a2df7d35e3d54" + "sha256:16201d64659ec0c2d25f65d6ce1f5367668b7b4eb102450efd4f8983a399d7d0", + "sha256:5cebf61fee13c2e83b5e177431e751e243fc779293377c5e0c3b43910bb7e870" ], - "version": "==1.19.0" + "version": "==3.2.0" }, "tree-format": { "hashes": [ @@ -920,22 +804,13 @@ ], "version": "==0.1.2" }, - "typing": { - "hashes": [ - "sha256:4027c5f6127a6267a435201981ba156de91ad0d1d98e9ddc2aa173453453492d", - "sha256:57dcf675a99b74d64dacf6fba08fb17cf7e3d5fdff53d4a30ea2a5e7e52543d4", - "sha256:a4c8473ce11a65999c8f59cb093e70686b6c84c98df58c1dae9b3b196089858a" - ], - "markers": "python_version < '3.5'", - "version": "==3.6.6" - }, "urllib3": { "hashes": [ - "sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1", - "sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232" + "sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398", + "sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86" ], - "markers": "python_version == '2.7'", - "version": "==1.25.3" + "markers": "python_version >= '3.4'", + "version": "==1.25.6" }, "wcwidth": { "hashes": [ @@ -953,30 +828,30 @@ }, "whichcraft": { "hashes": [ - "sha256:7533870f751901a0ce43c93cc9850186e9eba7fe58c924dfb435968ba9c9fa4e", - "sha256:fecddd531f237ffc5db8b215409afb18fa30300699064cca4817521b4fc81815" + "sha256:acdbb91b63d6a15efbd6430d1d7b2d36e44a71697e93e19b7ded477afd9fce87", + "sha256:deda9266fbb22b8c64fd3ee45c050d61139cd87419765f588e37c8d23e236dd9" ], - "version": "==0.5.2" + "version": "==0.6.1" }, "yamllint": { "hashes": [ - "sha256:9a4fec2d40804979de5f54453fd1551bc1f8b59a7ad4a26fd7f26aeca34a83af", - "sha256:f97cd763fe7b588444a94cc44fd3764b832a613b5250baa2bfe8b84c91e4c330" + 
"sha256:67173339f28868260ce5912abfefa10e115ceb1d2ac1c4d8c7acc8c4ef6c9a8a", + "sha256:70a6f8316851254e197a6231c35577be29fa2fbe2c77390a54c9a50217cdaa13" ], - "version": "==1.16.0" + "version": "==1.17.0" }, "yattag": { "hashes": [ - "sha256:d7214d100315093e3ddc34da9840acbfa65c79ec84b48a8191ddf535353c2e3f" + "sha256:47d1c842e0da596bac081fcc047f2d6fd778b16d20745a28c00ce99d80831fbc" ], - "version": "==1.11.2" + "version": "==1.12.2" }, "zipp": { "hashes": [ - "sha256:8c1019c6aad13642199fbe458275ad6a84907634cc9f0989877ccc4a2840139d", - "sha256:ca943a7e809cc12257001ccfb99e3563da9af99d52f261725e96dfe0f9275bc3" + "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", + "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335" ], - "version": "==0.5.1" + "version": "==0.6.0" } } } diff --git a/aws_jira_dc_node.yml b/aws_jira_dc_node.yml index c349bb8..be81d83 100644 --- a/aws_jira_dc_node.yml +++ b/aws_jira_dc_node.yml @@ -13,6 +13,11 @@ atl_startup_systemd_params: - "LimitNOFILE=16384" + atl_jdbc_encoding: 'UNICODE' + atl_jdbc_collation: 'C' + atl_jdbc_ctype: 'C' + atl_jdbc_template: 'template0' + roles: - role: linux_common - role: aws_common @@ -20,5 +25,6 @@ - role: product_common - role: product_install - role: database_init + - role: restore_backups - role: jira_config - role: product_startup diff --git a/bin/ansible-with-atl-env b/bin/ansible-with-atl-env index 29d5fee..3685381 100755 --- a/bin/ansible-with-atl-env +++ b/bin/ansible-with-atl-env @@ -8,6 +8,9 @@ PLAYBOOK=${2:?"Playbook must be specified"} LOG_FILE=${3:-"/dev/null"} ENV_FILE=${4:-"/etc/atl"} +export PATH=/usr/local/bin:$PATH + + # Set the environment with default exports set -a source $ENV_FILE diff --git a/bin/install-ansible b/bin/install-ansible index a6da6bf..196562e 100755 --- a/bin/install-ansible +++ b/bin/install-ansible @@ -2,17 +2,26 @@ set -e -# The Amazon Linux 2 Ansible package is 2.4, which has issue -# interacting with RDS, so use pipenv to install a known-good version. -# Another alternative here would be nix, however that has issues -# installing as root, and can be slow in practice. +source /etc/os-release +if [[ $ID = "amzn" ]]; then + yum install -y \ + python3-devel \ + python3-pip \ + python2-boto3 \ + python2-botocore -# Luckily AmazonLinux2 and Ubuntu use the same package name for -# pip. This may need some logic if other distros are added. Note: -# Parsing /etc/os-release is probably a good starting point for that. -./bin/pacapt install --noconfirm python-pip -export PATH=$PATH:/usr/local/bin +else + # FIXME: Currently assumes Debian-based + apt-get update && \ + apt-get install -y \ + python3-dev \ + python3-pip +fi +export PATH=/usr/local/bin:$PATH -# See Pipfile and Pipfile.lock. -pip install pipenv +pip3 install pipenv pipenv sync + +if [[ $1 == "--dev" ]]; then + pipenv sync --dev +fi diff --git a/bin/pacapt b/bin/pacapt deleted file mode 100755 index bf1b711..0000000 --- a/bin/pacapt +++ /dev/null @@ -1,2561 +0,0 @@ -#!/usr/bin/env bash -# -# Purpose: A wrapper for all Unix package managers -# License: Fair license (http://www.opensource.org/licenses/fair) -# Source : http://github.com/icy/pacapt/ -# Version: 2.4.2 -# Authors: Anh K. Huynh et al. - -# Copyright (C) 2010 - 2019 \ -# | 10sr (10sr) -# | Alexander Dupuy (dupuy) -# | Anh K. Huynh (icy) -# | Antony Lee (anntzer) -# | Alex Lyon (Arcterus) -# | Carl X. 
Su (bcbcarl) -# | Cuong Manh Le (Gnouc) -# | Daniel YC Lin (dlintw) -# | Danny George (dangets) -# | Darshit Shah (darnir) -# | Dmitry Kudriavtsev (dkudriavtsev) -# | Eric Crosson (EricCrosson) -# | Evan Relf (evanrelf) -# | GijsTimmers (GijsTimmers) -# | Hà-Dương Nguyễn (cmpitg) -# | Huy Ngô (NgoHuy) -# | James Pearson (xiongchiamiov) -# | Janne Heß (dasJ) -# | Jiawei Zhou (4679) -# | Karol Blazewicz -# | Kevin Brubeck (unhammer) -# | Konrad Borowski (xfix) -# | Kylie McClain (somasis) -# | Valerio Pizzi (Pival81) -# | Siôn Le Roux (sinisterstuf) -# | Thiago Perrotta (thiagowfx) -# | Vojtech Letal (letalvoj) -# -# Usage of the works is permitted provided that this instrument is -# retained with the works, so that any entity that uses the works is -# notified of this instrument. -# -# DISCLAIMER: THE WORKS ARE WITHOUT WARRANTY. -# - -_print_pacapt_version() { - cat <<_EOF_ -pacapt version '2.4.2' - -Copyright (C) 2010 - 2019 \\ - | 10sr (10sr) - | Alexander Dupuy (dupuy) - | Anh K. Huynh (icy) - | Antony Lee (anntzer) - | Alex Lyon (Arcterus) - | Carl X. Su (bcbcarl) - | Cuong Manh Le (Gnouc) - | Daniel YC Lin (dlintw) - | Danny George (dangets) - | Darshit Shah (darnir) - | Dmitry Kudriavtsev (dkudriavtsev) - | Eric Crosson (EricCrosson) - | Evan Relf (evanrelf) - | GijsTimmers (GijsTimmers) - | Hà-Dương Nguyễn (cmpitg) - | Huy Ngô (NgoHuy) - | James Pearson (xiongchiamiov) - | Janne Heß (dasJ) - | Jiawei Zhou (4679) - | Karol Blazewicz - | Kevin Brubeck (unhammer) - | Konrad Borowski (xfix) - | Kylie McClain (somasis) - | Valerio Pizzi (Pival81) - | Siôn Le Roux (sinisterstuf) - | Thiago Perrotta (thiagowfx) - | Vojtech Letal (letalvoj) - -Usage of the works is permitted provided that this -instrument is retained with the works, so that any -entity that uses the works is notified of this instrument. - -DISCLAIMER: THE WORKS ARE WITHOUT WARRANTY. -_EOF_ -} - -export PACAPT_VERSION='2.4.2' - -_help() { - cat <<'EOF' -NAME - pacapt - An `ArchLinux`'s pacman-like wrapper for many package managers. - -SYNTAX - - $ pacapt - -BASIC OPTIONS - - -h or --help print this help message - -P print supported operations - -V print version information - -SYSGET STYLE OPERATIONS - - update Update package database - upgrade Upgrade system - install Install some packages - search Search some package - remove Remove some packages - autoremove Remove orphans (WIP; may not work correctly) - clean Clean package manager caches - -PACMAN STYLE OPERATIONS - - Query - -Q list all installed packages - -Qc show package's changelog - -Qe [] only list explicitly installed packages - -Qi print package status - -Ql list package's files - -Qm list installed packages that aren't available - in any installation source - -Qo query package that provides - -Qp query a package file (don't use package database) - -Qs search for installed package - - Synchronize - -S install package(s) - -Sg list groups - -Sg list packages in group - -Ss search for packages - -Su upgrade the system - -Sy update package database - -Suy update package database, then upgrade the system - - Remove / Clean up - -R remove some packages - -Sc delete old downloaded packages - -Scc delete all downloaded packages - -Sccc clean variant files. - (debian) See also http://dragula.viettug.org/blogs/646 - - Upgrade - -U upgrade or add package from local file path (or remote uri) - -OPTIONS - - -w download packages but don't install them - --noconfirm don't wait for user's confirmation - -EXAMPLES - - 1. 
To install a package from Debian's backports repository - $ pacapt -S foobar -t lenny-backports - $ pacapt -S -- -t lenny-backports foobar - - 2. To update package database and then update your system - $ pacapt -Syu - - 3. To download a package without installing it - $ pacapt -Sw foobar - - -ENVIRONMENT - - PACAPT_DEBUG - - This is useful for debugging purpose. The variable can be set to `auto` - or any valid packager. For example, on `Debian` system the two following - commands are the same and they will print out what the script would do: - - PACAPT_DEBUG=auto pacman -Su - PACAPT_DEBUG=dpkg pacman -Su - -NOTES - - When being executed on Arch-based system, the tool simply invokes - the system package manager (`/usr/bin/pacman`). - - Though you can specify option by its own word, for example, - $ pacapt -S -y -u - - it's always the best to combine them - $ pacapt -Syu - -READMORE - - Please visit https://github.com/icy/pacapt. -EOF - -} - - - - -_error() { - echo >&2 "Error: $*" - return 1 -} - -_warn() { - echo >&2 "Warning: $*" - return 0 -} - -_die() { - echo >&2 "$@" - exit 1 -} - -_not_implemented() { - # shellcheck disable=2153 - echo >&2 "${_PACMAN}: '${_POPT}:${_SOPT}:${_TOPT}' operation is invalid or not implemented." - return 1 -} - -_removing_is_dangerous() { - echo >&2 "${_PACMAN}: removing with '$*' is too dangerous" - return 1 -} - -_issue2pacman() { - local _pacman - - _pacman="$1"; shift - - # The following line is added by Daniel YC Lin to support SunOS. - # - # [ `uname` = "$1" ] && _PACMAN="$_pacman" && return - # - # This is quite tricky and fast, however I don't think it works - # on Linux/BSD systems. To avoid extra check, I slightly modify - # the code to make sure it's only applicable on SunOS. - # - [[ "$(uname)" == "SunOS" ]] && _PACMAN="$_pacman" && return - - $GREP -qis "$@" /etc/issue \ - && _PACMAN="$_pacman" && return - - $GREP -qis "$@" /etc/os-release \ - && _PACMAN="$_pacman" && return -} - -_PACMAN_detect() { - _PACMAN_found_from_script_name && return - - _issue2pacman sun_tools "SunOS" && return - _issue2pacman pacman "Arch Linux" && return - _issue2pacman dpkg "Debian GNU/Linux" && return - _issue2pacman dpkg "Ubuntu" && return - _issue2pacman cave "Exherbo Linux" && return - _issue2pacman yum "CentOS" && return - _issue2pacman yum "Red Hat" && return - # - # FIXME: The multiple package issue. - # - # On #63, Huy commented out this line. This is because new generation - # of Fedora uses `dnf`, and `yum` becomes a legacy tool. On old Fedora - # system, `yum` is still detectable by looking up `yum` binary. - # - # I'm not sure how to support this case easily. Let's wait, e.g, 5 years - # from now to make `dnf` becomes a default? Oh no! - # - # And here why `pacman` is still smart. Debian has a set of tools. - # Fedora has `yum` (and a set of add-ons). Now Fedora moves to `dnf`. - # This means that a package manager is not a heart of a system ;) - # - # _issue2pacman yum "Fedora" && return - _issue2pacman zypper "SUSE" && return - _issue2pacman pkg_tools "OpenBSD" && return - _issue2pacman pkg_tools "Bitrig" && return - _issue2pacman apk "Alpine Linux" && return - - [[ -z "$_PACMAN" ]] || return - - # Prevent a loop when this script is installed on non-standard system - if [[ -x "/usr/bin/pacman" ]]; then - $GREP -q "${FUNCNAME[0]}" '/usr/bin/pacman' >/dev/null 2>&1 - [[ $? 
-ge 1 ]] && _PACMAN="pacman" \ - && return - fi - - [[ -x "/usr/bin/apt-get" ]] && _PACMAN="dpkg" && return - [[ -x "/data/data/com.termux/files/usr/bin/apt-get" ]] && _PACMAN="dpkg" && return - [[ -x "/usr/bin/cave" ]] && _PACMAN="cave" && return - [[ -x "/usr/bin/dnf" ]] && _PACMAN="dnf" && return - [[ -x "/usr/bin/yum" ]] && _PACMAN="yum" && return - [[ -x "/opt/local/bin/port" ]] && _PACMAN="macports" && return - [[ -x "/usr/bin/emerge" ]] && _PACMAN="portage" && return - [[ -x "/usr/bin/zypper" ]] && _PACMAN="zypper" && return - [[ -x "/usr/sbin/pkg" ]] && _PACMAN="pkgng" && return - # make sure pkg_add is after pkgng, FreeBSD base comes with it until converted - [[ -x "/usr/sbin/pkg_add" ]] && _PACMAN="pkg_tools" && return - [[ -x "/usr/sbin/pkgadd" ]] && _PACMAN="sun_tools" && return - [[ -x "/sbin/apk" ]] && _PACMAN="apk" && return - [[ -x "/usr/bin/tazpkg" ]] && _PACMAN="tazpkg" && return - [[ -x "/usr/bin/swupd" ]] && _PACMAN="swupd" && return - - command -v brew >/dev/null && _PACMAN="homebrew" && return - - return 1 -} - -_translate_w() { - - echo "$_EOPT" | $GREP -q ":w:" || return 0 - - local _opt= - local _ret=0 - - case "$_PACMAN" in - "dpkg") _opt="-d";; - "cave") _opt="-f";; - "macports") _opt="fetch";; - "portage") _opt="--fetchonly";; - "zypper") _opt="--download-only";; - "pkgng") _opt="fetch";; - "yum") _opt="--downloadonly"; - if ! rpm -q 'yum-downloadonly' >/dev/null 2>&1; then - _error "'yum-downloadonly' package is required when '-w' is used." - _ret=1 - fi - ;; - "tazpkg") - _error "$_PACMAN: Use '$_PACMAN get' to download and save packages to current directory." - _ret=1 - ;; - "apk") _opt="fetch";; - *) - _opt="" - _ret=1 - - _error "$_PACMAN: Option '-w' is not supported/implemented." - ;; - esac - - echo $_opt - return "$_ret" -} - -_translate_debug() { - echo "$_EOPT" | $GREP -q ":v:" || return 0 - - case "$_PACMAN" in - "tazpkg") - _error "$_PACMAN: Option '-v' (debug) is not supported/implemented by tazpkg" - return 1 - ;; - esac - - echo "-v" -} - -_translate_noconfirm() { - - echo "$_EOPT" | $GREP -q ":noconfirm:" || return 0 - - local _opt= - local _ret=0 - - case "$_PACMAN" in - # FIXME: Update environment DEBIAN_FRONTEND=noninteractive - # FIXME: There is also --force-yes for a stronger case - "dpkg") _opt="--yes";; - "dnf") _opt="--assumeyes";; - "yum") _opt="--assumeyes";; - # FIXME: pacman has 'assume-yes' and 'assume-no' - # FIXME: zypper has better mode. Similar to dpkg (Debian). - "zypper") _opt="--no-confirm";; - "pkgng") _opt="-y";; - "tazpkg") _opt="--auto";; - *) - _opt="" - _ret=1 - _error "$_PACMAN: Option '--noconfirm' is not supported/implemented." 
- ;; - esac - - echo $_opt - return $_ret -} - -_translate_all() { - local _args="" - local _debug= - local _noconfirm= - - _debug="$(_translate_debug)" - _noconfirm="$(_translate_noconfirm)" - _args="$(_translate_w)" || return 1 - _args="${_args}${_noconfirm:+ }${_noconfirm}" || return 1 - _args="${_args}${_debug:+ }${_debug}" || return 1 - - export _EOPT="${_args# }" -} - -_print_supported_operations() { - local _pacman="$1" - echo -n "pacapt($_pacman): available operations:" - # shellcheck disable=2016 - $GREP -E "^${_pacman}_[^ \\t]+\\(\\)" "$0" \ - | $AWK -F '(' '{print $1}' \ - | sed -e "s/${_pacman}_//g" \ - | while read -r O; do - echo -n " $O" - done - echo -} - - -export _SUPPORTED_EXTERNALS=" - :conda - :tlmgr - :texlive - :gem - :npm - :pip -" -readonly _SUPPORTED_EXTERNALS - -_PACMAN_found_from_script_name() { - local _tmp_name= - local _pacman= - - _tmp_name="${BASH_SOURCE[0]:-?}" - if [[ "$_tmp_name" == "?" ]]; then - _error "Unable to get script name." - return 1 - fi - - _tmp_name="${_tmp_name##*/}" # base name (remove everything before the last `/`) - _tmp_name="${_tmp_name%.*}" # remove extension if any (remove everything from the last `.`) - _pacman="${_tmp_name##*-}" # remove every thing before the last `-` - - if grep -Eq -e ":$_pacman[[:space:]]*" <<< "$_SUPPORTED_EXTERNALS"; then - export _PACMAN="$_pacman" - return 0 - else - export _PACMAN="" - return 1 - fi -} - - - -_apk_init() { - : -} - -apk_Q() { - if [[ -z "$_TOPT" ]]; then - apk info - else - _not_implemented - fi -} - -apk_Qi() { - apk info -a -- "$@" -} - -apk_Ql() { - apk info -L -- "$@" -} - -apk_Qo() { - apk info --who-owns -- "$@" -} - -apk_Qs() { - apk info -- "*${*}*" -} - -apk_Qu() { - apk version -l '<' -} - -apk_R() { - apk del -- "$@" -} - -apk_Rn() { - apk del --purge -- "$@" -} - -apk_Rns() { - apk del --purge -r -- "$@" -} - -apk_Rs() { - apk del -r -- "$@" -} - -apk_S() { - case ${_EOPT} in - # Download only - ("fetch") shift - apk fetch -- "$@" ;; - (*) apk add $_TOPT -- "$@" ;; - esac -} - -apk_Sc() { - apk cache -v clean -} - -apk_Scc() { - rm -rf /var/cache/apk/* -} - -apk_Sccc() { - apk_Scc -} - -apk_Si() { - apk_Qi "$@" -} - -apk_Sii() { - apk info -r -- "$@" -} - -apk_Sl() { - apk search -v -- "$@" -} - -apk_Ss() { - apk_Sl "$@" -} - -apk_Su() { - apk upgrade -} - -apk_Suy() { - if [ "$#" -gt 0 ]; then - apk add -U -u -- "$@" - else - apk upgrade -U -a - fi -} - -apk_Sy() { - apk update -} - -apk_Sw() { - apk fetch -- "$@" -} - -apk_U() { - apk add --allow-untrusted -- "$@" -} - - - -_cave_init() { - shopt -u globstar -} - -cave_Q() { - if [[ "$_TOPT" == "q" ]]; then - cave show -f "${@:-world}" \ - | grep -v '^$' - else - cave show -f "${@:-world}" - fi -} - -cave_Qi() { - cave show "$@" -} - -cave_Ql() { - if [[ -n "$*" ]]; then - cave contents "$@" - return - fi - - cave show -f "${@:-world}" \ - | grep -v '^$' \ - | while read -r _pkg; do - if [[ "$_TOPT" == "q" ]]; then - cave --color no contents "$_pkg" - else - cave contents "$_pkg" - fi - done -} - -cave_Qo() { - cave owner "$@" -} - -cave_Qp() { - _not_implemented -} - -cave_Qu() { - if [[ -z "$*" ]];then - cave resolve -c world \ - | grep '^u.*' \ - | while read -r _pkg; do - echo "$_pkg" | cut -d'u' -f2- - done - else - cave resolve -c world \ - | grep '^u.*' \ - | grep -- "$@" - fi -} - -cave_Qs() { - cave show -f world | grep -- "$@" -} - -cave_Rs() { - if [[ "$_TOPT" == "" ]]; then - cave uninstall -r "$@" \ - && echo "Control-C to stop uninstalling..." 
\ - && sleep 2s \ - && cave uninstall -xr "$@" - else - cave purge "$@" \ - && echo "Control-C to stop uninstalling (+ dependencies)..." \ - && sleep 2s \ - && cave purge -x "$@" - fi -} - -cave_Rn() { - _not_implemented -} - -cave_Rns() { - _not_implemented -} - -cave_R() { - cave uninstall "$@" \ - && echo "Control-C to stop uninstalling..." \ - && sleep 2s \ - && cave uninstall -x "$@" -} - -cave_Si() { - cave show "$@" -} - -cave_Suy() { - cave sync && cave resolve -c "${@:-world}" \ - && echo "Control-C to stop upgrading..." \ - && sleep 2s \ - && cave resolve -cx "${@:-world}" -} - -cave_Su() { - cave resolve -c "$@" \ - && echo "Control-C to stop upgrading..." \ - && sleep 2s \ - && cave resolve -cx "$@" -} - -cave_Sy() { - cave sync "$@" -} - -cave_Ss() { - cave search "$@" -} - -cave_Sc() { - cave fix-cache "$@" -} - -cave_Scc() { - cave fix-cache "$@" -} - -cave_Sccc() { - #rm -fv /var/cache/paludis/* - _not_implemented -} - -cave_S() { - cave resolve $_TOPT "$@" \ - && echo "Control-C to stop installing..." \ - && sleep 2s \ - && cave resolve -x $_TOPT "$@" -} - -cave_U() { - _not_implemented -} - - - -_conda_init() { - : -} - -conda_Q() { - if [[ $# -gt 0 ]]; then - conda list "$(python -c 'import sys; print("^" + "|".join(sys.argv[1:]) + "$")' "$@")" - else - conda list - fi -} - -conda_R() { - conda remove "$@" -} - -conda_S() { - conda install "$@" -} - -conda_Sc() { - conda clean --all "$@" -} - -conda_Si() { - conda search "$@" --info -} - -conda_Ss() { - conda search "*$@*" -} - -conda_Suy() { - conda update --all "$@" -} - - - - -_dnf_init() { - : -} - -dnf_S() { - dnf install $_TOPT "$@" -} - -dnf_Sc() { - dnf clean expire-cache "$@" -} - -dnf_Scc() { - dnf clean packages "$@" -} - -dnf_Sccc() { - dnf clean all "$@" -} - -dnf_Si() { - dnf info "$@" -} - -dnf_Sg() { - if [[ $# -gt 0 ]]; then - dnf group info "$@" - else - dnf group list - fi -} - -dnf_Sl() { - dnf list available "$@" -} - -dnf_Ss() { - dnf search "$@" -} - -dnf_Su() { - dnf upgrade "$@" -} - -dnf_Suy() { - dnf upgrade "$@" -} - -dnf_Sw() { - dnf download "$@" -} - -dnf_Sy() { - dnf clean expire-cache && dnf check-update -} - -dnf_Q() { - if [[ "$_TOPT" == "q" ]]; then - rpm -qa --qf "%{NAME}\\n" - elif [[ "$_TOPT" == "" ]]; then - rpm -qa --qf "%{NAME} %{VERSION}\\n" - else - _not_implemented - fi -} - -dnf_Qc() { - rpm -q --changelog "$@" -} - -dnf_Qe() { - dnf repoquery --userinstalled "$@" -} - -dnf_Qi() { - dnf info "$@" -} - -dnf_Ql() { - rpm -ql "$@" -} - -dnf_Qm() { - dnf list extras -} - -dnf_Qo() { - rpm -qf "$@" -} - -dnf_Qp() { - rpm -qp "$@" -} - -dnf_Qs() { - rpm -qa "*${*}*" -} - -dnf_Qu() { - dnf list updates "$@" -} - -dnf_R() { - dnf remove "$@" -} - -dnf_U() { - dnf install "$@" -} - - - -_dpkg_init() { - : -} - -dpkg_Q() { - if [[ "$_TOPT" == "q" ]]; then - dpkg -l \ - | grep -E '^[hi]i' \ - | awk '{print $2}' - elif [[ "$_TOPT" == "" ]]; then - dpkg -l "$@" \ - | grep -E '^[hi]i' - else - _not_implemented - fi -} - -dpkg_Qi() { - dpkg-query -s "$@" -} - -dpkg_Ql() { - if [[ -n "$*" ]]; then - dpkg-query -L "$@" - return - fi - - dpkg -l \ - | grep -E '^[hi]i' \ - | awk '{print $2}' \ - | while read -r _pkg; do - if [[ "$_TOPT" == "q" ]]; then - dpkg-query -L "$_pkg" - else - dpkg-query -L "$_pkg" \ - | while read -r _line; do - echo "$_pkg $_line" - done - fi - done -} - -dpkg_Qo() { - dpkg-query -S "$@" -} - -dpkg_Qp() { - dpkg-deb -I "$@" -} - -dpkg_Qu() { - apt-get upgrade --trivial-only "$@" -} - -dpkg_Qs() { - # dpkg >= 1.16.2 dpkg-query -W -f='${db:Status-Abbrev} 
${binary:Package}\t${Version}\t${binary:Summary}\n' - dpkg-query -W -f='${Status} ${Package}\t${Version}\t${Description}\n' \ - | grep -E '^((hold)|(install)|(deinstall))' \ - | sed -r -e 's#^(\w+ ){3}##g' \ - | grep -Ei "${@:-.}" -} - -dpkg_Rs() { - if [[ "$_TOPT" == "" ]]; then - apt-get autoremove "$@" - else - _not_implemented - fi -} - -dpkg_Rn() { - apt-get purge "$@" -} - -dpkg_Rns() { - apt-get --purge autoremove "$@" -} - -dpkg_R() { - apt-get remove "$@" -} - -dpkg_Si() { - apt-cache show "$@" -} - -dpkg_Suy() { - apt-get update \ - && apt-get upgrade "$@" \ - && apt-get dist-upgrade "$@" -} - -dpkg_Su() { - apt-get upgrade "$@" \ - && apt-get dist-upgrade "$@" -} - - -dpkg_Sy() { - apt-get update "$@" -} - -dpkg_Ss() { - apt-cache search "$@" -} - -dpkg_Sc() { - apt-get clean "$@" -} - -dpkg_Scc() { - apt-get autoclean "$@" -} - -dpkg_S() { - apt-get install $_TOPT "$@" -} - -dpkg_U() { - dpkg -i "$@" -} - -dpkg_Sii() { - apt-cache rdepends "$@" -} - -dpkg_Sccc() { - rm -fv /var/cache/apt/*.bin - rm -fv /var/cache/apt/archives/*.* - rm -fv /var/lib/apt/lists/*.* - apt-get autoclean -} - - - -_homebrew_init() { - : -} - -homebrew_Qi() { - brew info "$@" -} - -homebrew_Ql() { - brew list "$@" -} - -homebrew_Qo() { - local pkg prefix cellar - - # FIXME: What happens if the file is not exectutable? - cd "$(dirname -- "$(which "$@")")" || return - pkg="$(pwd -P)/$(basename -- "$@")" - prefix="$(brew --prefix)" - cellar="$(brew --cellar)" - - for package in $cellar/*; do - files=(${package}/*/${pkg/#$prefix\//}) - if [[ -e "${files[${#files[@]} - 1]}" ]]; then - echo "${package/#$cellar\//}" - break - fi - done -} - -homebrew_Qc() { - brew log "$@" -} - -homebrew_Qu() { - brew outdated | grep "$@" -} - -homebrew_Qs() { - brew list | grep "$@" -} - -homebrew_Q() { - if [[ "$_TOPT" == "" ]]; then - if [[ "$*" == "" ]]; then - brew list - else - brew list | grep "$@" - fi - else - _not_implemented - fi -} - -homebrew_Rs() { - which join > /dev/null - if [ $? -ne 0 ]; then - _die "pacapt: join binary does not exist in system." - fi - - which sort > /dev/null - if [ $? -ne 0 ]; then - _die "pacapt: sort binary does not exist in system." - fi - - if [[ "$@" == "" ]]; then - _die "pacapt: ${FUNCNAME[0]} requires arguments" - fi - - for _target in $@; - do - brew rm $_target - - while [ "$(join <(sort <(brew leaves)) <(sort <(brew deps $_target)))" != "" ] - do - brew rm $(join <(sort <(brew leaves)) <(sort <(brew deps $_target))) - done - done - -} - -homebrew_R() { - brew remove "$@" -} - -homebrew_Si() { - brew info "$@" -} - -homebrew_Suy() { - brew update \ - && brew upgrade "$@" -} - -homebrew_Su() { - brew upgrade "$@" -} - -homebrew_Sy() { - brew update "$@" -} - -homebrew_Ss() { - brew search "$@" -} - -homebrew_Sc() { - brew cleanup "$@" -} - -homebrew_Scc() { - brew cleanup -s "$@" -} - -homebrew_Sccc() { - # See more discussion in - # https://github.com/icy/pacapt/issues/47 - - local _dcache - - _dcache="$(brew --cache)" - case "$_dcache" in - ""|"/"|" ") - _error "${FUNCNAME[0]}: Unable to delete '$_dcache'." - ;; - - *) - # FIXME: This is quite stupid!!! But it's an easy way - # FIXME: to avoid some warning from #shellcheck. - # FIXME: Please note that, $_dcache is not empty now. 
- rm -rf "${_dcache:-/x/x/x/x/x/x/x/x/x/x/x//x/x/x/x/x/}/" - ;; - esac -} - -homebrew_S() { - brew install $_TOPT "$@" -} - - - -_macports_init() { - : -} - -macports_Ql() { - port contents "$@" -} - -macports_Qo() { - port provides "$@" -} - -macports_Qc() { - port log "$@" -} - -macports_Qu() { - port outdated "$@" -} - -macports_Rs() { - if [[ "$_TOPT" == "" ]]; then - port uninstall --follow-dependencies "$@" - else - _not_implemented - fi -} - -macports_R() { - port uninstall "$@" -} - -macports_Si() { - port info "$@" -} - -macports_Suy() { - port selfupdate \ - && port upgrade outdated "$@" -} - -macports_Su() { - port upgrade outdate "$@" -} - -macports_Sy() { - port selfupdate "$@" -} - -macports_Ss() { - port search "$@" -} - -macports_Sc() { - port clean --all inactive "$@" -} - -macports_Scc() { - port clean --all installed "$@" -} - -macports_S() { - if [[ "$_TOPT" == "fetch" ]]; then - port patch "$@" - else - port install "$@" - fi -} - - - -_pkgng_init() { - : -} - -pkgng_Qi() { - pkg info "$@" -} - -pkgng_Ql() { - pkg info -l "$@" -} - -pkgng_Qo() { - pkg which "$@" -} - -pkgng_Qp() { - pkg query -F "$@" '%n %v' -} - -pkgng_Qu() { - pkg upgrade -n "$@" -} - -pkgng_Q() { - if [[ "$_TOPT" == "q" ]]; then - pkg query '%n' "$@" - elif [[ "$_TOPT" == "" ]]; then - pkg query '%n %v' "$@" - else - _not_implemented - fi -} - -pkgng_Rs() { - if [[ "$_TOPT" == "" ]]; then - pkg remove "$@" - pkg autoremove - else - _not_implemented - fi -} - -pkgng_R() { - pkg remove "$@" -} - -pkgng_Si() { - pkg search -S name -ef "$@" -} - -pkgng_Suy() { - pkg upgrade "$@" -} - -pkgng_Su() { - pkg upgrade -U "$@" -} - -pkgng_Sy() { - pkg update "$@" -} - -pkgng_Ss() { - pkg search "$@" -} - -pkgng_Sc() { - pkg clean "$@" -} - -pkgng_Scc() { - pkg clean -a "$@" -} - -pkgng_S() { - if [[ "$_TOPT" == "fetch" ]]; then - pkg fetch "$@" - else - pkg install "$@" - fi -} - - - -_pkg_tools_init() { - : -} - -pkg_tools_Qi() { - # disable searching mirrors for packages - export PKG_PATH= - pkg_info "$@" -} - -pkg_tools_Ql() { - export PKG_PATH= - pkg_info -L "$@" -} - -pkg_tools_Qo() { - export PKG_PATH= - pkg_info -E "$@" -} - -pkg_tools_Qp() { - _not_implemented -} - -pkg_tools_Qu() { - export PKG_PATH= - pkg_add -u "$@" -} - -pkg_tools_Q() { - export PKG_PATH= - # the dash after the pkg name is so we don't catch partial matches - # because all packages in openbsd have the format 'pkgname-pkgver' - if [[ "$_TOPT" == "q" && ! -z "$*" ]]; then - pkg_info -q | grep "^${*}-" - elif [[ "$_TOPT" == "q" && -z "$*" ]];then - pkg_info -q - elif [[ "$_TOPT" == "" && ! 
-z "$*" ]]; then - pkg_info | grep "^${*}-" - elif [[ "$_TOPT" == "" && -z "$*" ]];then - pkg_info - else - _not_implemented - fi -} - -pkg_tools_Rs() { - if [[ "$_TOPT" == "" ]]; then - pkg_delete -D dependencies "$@" - else - _not_implemented - fi -} - -pkg_tools_Rn() { - if [[ "$_TOPT" == "" ]];then - pkg_delete -c "$@" - else - _not_implemented - fi -} - -pkg_tools_Rns() { - _not_implemented -} - -pkg_tools_R() { - pkg_delete "$@" -} - -pkg_tools_Si() { - pkg_info "$@" -} - -pkg_tools_Sl() { - pkg_info -L "$@" -} - -pkg_tools_Suy() { - # pkg_tools doesn't really have any concept of a database - # there's actually not really any database to update, so - # this function is mostly just for convienience since on arch - # doing -Su is normally a bad thing to do since it's a partial upgrade - - pkg_tools_Su "$@" -} - -pkg_tools_Su() { - pkg_add -u "$@" -} - -pkg_tools_Sy() { - _not_implemented -} - -pkg_tools_Ss() { - if [[ -z "$*" ]];then - _not_implemented - else - pkg_info -Q "$@" - fi -} - -pkg_tools_Sc() { - # by default no cache directory is used - if [[ -z "$PKG_CACHE" ]];then - echo "You have no cache directory set, set \$PKG_CACHE for a cache directory." - elif [[ ! -d "$PKG_CACHE" ]];then - echo "You have a cache directory set, but it does not exist. Create \"$PKG_CACHE\"." - else - _removing_is_dangerous "rm -rf $PKG_CACHE/*" - fi -} - -pkg_tools_Scc() { - _not_implemented -} - -pkg_tools_S() { - pkg_add "$@" -} - - - -_portage_init() { - : -} - -portage_Qi() { - emerge --info "$@" -} - -portage_Ql() { - if [[ -x '/usr/bin/qlist' ]]; then - qlist "$@" - elif [[ -x '/usr/bin/equery' ]]; then - equery files "$@" - else - _error "'portage-utils' or 'gentoolkit' package is required to perform this opreation." - fi -} - -portage_Qo() { - if [[ -x '/usr/bin/equery' ]]; then - equery belongs "$@" - else - _error "'gentoolkit' package is required to perform this operation." - fi -} - -portage_Qc() { - emerge -p --changelog "$@" -} - -portage_Qu() { - emerge -uvN "$@" -} - -portage_Q() { - if [[ "$_TOPT" == "" ]]; then - if [[ -x '/usr/bin/eix' ]]; then - eix -I "$@" - elif [[ -x '/usr/bin/equery' ]]; then - equery list -i "$@" - else - LS_COLORS=never \ - ls -1 -d /var/db/pkg/*/* - fi - else - _not_implemented - fi -} - -portage_Rs() { - if [[ "$_TOPT" == "" ]]; then - emerge --depclean world "$@" - else - _not_implemented - fi -} - -portage_R() { - emerge --depclean "@" -} - -portage_Si() { - emerge --info "$@" -} - -portage_Suy() { - if [[ -x '/usr/bin/layman' ]]; then - layman --sync-all \ - && emerge --sync \ - && emerge -auND world "$@" - else - emerge --sync \ - && emerge -uND world "$@" - fi -} - -portage_Su() { - emerge -uND world "$@" -} - -portage_Sy() { - if [[ -x "/usr/bin/layman" ]]; then - layman --sync-all \ - && emerge --sync "$@" - else - emerge --sync "$@" - fi -} - -portage_Ss() { - if [[ -x "/usr/bin/eix" ]]; then - eix "$@" - else - emerge --search "$@" - fi -} - -portage_Sc() { - if [[ -x "/usr/bin/eclean-dist" ]]; then - eclean-dist -d -t1m -s50 -f "$@" - else - _error "'gentoolkit' package is required to perform this operation." - fi -} - -portage_Scc() { - if [[ -x "/usr/bin/eclean" ]]; then - eclean -i distfiles "$@" - else - _error "'gentoolkit' package is required to perform this operation." - fi -} - -portage_Sccc() { - rm -fv /usr/portage/distfiles/*.* -} - -portage_S() { - emerge "$@" -} - - - -_sun_tools_init() { - # The purpose of `if` is to make sure this function - # can be invoked on other system (Linux, BSD). 
- if [[ "$(uname)" == "SunOS" ]]; then - export GREP=/usr/xpg4/bin/grep - export AWK=nawk - fi -} - -sun_tools_Qi() { - pkginfo -l "$@" -} - -sun_tools_Ql() { - pkginfo -l "$@" -} - -sun_tools_Qo() { - $GREP "$@" /var/sadm/install/contents -} - -sun_tools_Qs() { - pkginfo | $GREP -i "$@" -} - -sun_tools_Q() { - # the dash after the pkg name is so we don't catch partial matches - # because all packages in openbsd have the format 'pkgname-pkgver' - if [[ "$_TOPT" == "q" && ! -z "$*" ]]; then - pkginfo | $GREP "$@" - elif [[ "$_TOPT" == "q" && -z "$*" ]]; then - pkginfo - else - pkginfo "$@" - fi -} - -sun_tools_R() { - pkgrm "$@" -} - -sun_tools_U() { - pkgadd "$@" -} - - - -_swupd_init() { - : -} - -swupd_Qk() { - swupd verify "$@" -} - -swupd_Qo() { - swupd search "$@" -} - -swupd_Qs() { - swupd search "$@" -} - -swupd_R() { - swupd bundle-remove "$@" -} - -swupd_Suy() { - swupd update -} - -swupd_Su() { - swupd update -} - -swupd_Sy() { - swupd search -i - swupd update -} - -swupd_Ss() { - swupd search "$@" -} - -swupd_S() { - swupd bundle-add "$@" -} - - -_tazpkg_init() { - : -} - -tazpkg_Q() { - if [[ "$_TOPT" == "q" ]]; then - tazpkg list "$@" \ - | awk '{ if (NF == 2 || NF == 3) { print $1; }}' - elif [[ "$_TOPT" == "" ]]; then - tazpkg list "$@" - else - _not_implemented - fi -} - -tazpkg_Qi() { - tazpkg info "$@" -} - -tazpkg_Ql() { - if [[ -z "$*" ]]; then - _not_implemented - return - fi - - if [[ "$_TOPT" == "q" ]]; then - { - tazpkg list-files "$@" - tazpkg list-config "$@" - } \ - | grep ^/ - else - tazpkg list-files "$@" - tazpkg list-config "$@" - fi -} - -tazpkg_Sy() { - tazpkg recharge -} - -tazpkg_Su() { - tazpkg up -} - -tazpkg_Suy() { - tazpkg_Sy \ - && tazpkg_Su -} - -tazpkg_S() { - local _forced="" - - if grep -q -- "--forced" <<<"$*"; then - _forced="--forced" - fi - - while (( $# )); do - if [[ "$1" == "--forced" ]]; then - _forced="--forced" - shift - continue - fi - - tazpkg get-install "$1" $_forced - shift - done -} - -tazpkg_R() { - local _auto="" - - if grep -q -- "--auto" <<<"$*"; then - _auto="--auto" - fi - - while (( $# )); do - if [[ "$1" == "--auto" ]]; then - _auto="--auto" - shift - continue - fi - - tazpkg remove "$1" $_auto - shift - done -} - -tazpkg_Sc() { - tazpkg clean-cache -} - -tazpkg_Scc() { - tazpkg clean-cache - cd /var/lib/tazpkg/ \ - && { - rm -fv \ - ./*.bak \ - ID \ - packages.* \ - files.list.* - } -} - -tazpkg_Ss() { - tazpkg search "$@" -} - -tazpkg_Qo() { - tazpkg search-pkgname "$@" -} - -tazpkg_U() { - local _forced="" - - if grep -q -- "--forced" <<<"$*"; then - _forced="--forced" - fi - - while (( $# )); do - if [[ "$1" == "--forced" ]]; then - _forced="--forced" - shift - continue - fi - - tazpkg install "$1" $_forced - shift - done -} - - - -_tlmgr_init() { - : -} - -tlmgr_Qi() { - tlmgr info --only-installed "$@" -} - -tlmgr_Qk() { - tlmgr check files -} - -tlmgr_Ql() { - tlmgr info --only-installed --list "$@" -} - -tlmgr_R() { - tlmgr remove "$@" -} - -tlmgr_S() { - tlmgr install "$@" -} - -tlmgr_Si() { - tlmgr info "$@" -} - -tlmgr_Sl() { - tlmgr info -} - -tlmgr_Ss() { - tlmgr search --global "$@" -} - -tlmgr_Suy() { - tlmgr update --all -} - -tlmgr_U() { - tlmgr install --file "$@" -} - - - -_yum_init() { - : -} - -yum_Q() { - if [[ "$_TOPT" == "q" ]]; then - rpm -qa --qf "%{NAME}\\n" - elif [[ "$_TOPT" == "" ]]; then - rpm -qa --qf "%{NAME} %{VERSION}\\n" - else - _not_implemented - fi -} - -yum_Qi() { - yum info "$@" -} - -yum_Qs() { - rpm -qa "*${*}*" -} - -yum_Ql() { - rpm -ql "$@" -} - -yum_Qo() { - rpm -qf "$@" -} - 
-
-
-_tlmgr_init() {
-  :
-}
-
-tlmgr_Qi() {
-  tlmgr info --only-installed "$@"
-}
-
-tlmgr_Qk() {
-  tlmgr check files
-}
-
-tlmgr_Ql() {
-  tlmgr info --only-installed --list "$@"
-}
-
-tlmgr_R() {
-  tlmgr remove "$@"
-}
-
-tlmgr_S() {
-  tlmgr install "$@"
-}
-
-tlmgr_Si() {
-  tlmgr info "$@"
-}
-
-tlmgr_Sl() {
-  tlmgr info
-}
-
-tlmgr_Ss() {
-  tlmgr search --global "$@"
-}
-
-tlmgr_Suy() {
-  tlmgr update --all
-}
-
-tlmgr_U() {
-  tlmgr install --file "$@"
-}
-
-
-_yum_init() {
-  :
-}
-
-yum_Q() {
-  if [[ "$_TOPT" == "q" ]]; then
-    rpm -qa --qf "%{NAME}\\n"
-  elif [[ "$_TOPT" == "" ]]; then
-    rpm -qa --qf "%{NAME} %{VERSION}\\n"
-  else
-    _not_implemented
-  fi
-}
-
-yum_Qi() {
-  yum info "$@"
-}
-
-yum_Qs() {
-  rpm -qa "*${*}*"
-}
-
-yum_Ql() {
-  rpm -ql "$@"
-}
-
-yum_Qo() {
-  rpm -qf "$@"
-}
-
-yum_Qp() {
-  rpm -qp "$@"
-}
-
-yum_Qc() {
-  rpm -q --changelog "$@"
-}
-
-yum_Qu() {
-  yum list updates "$@"
-}
-
-yum_Qm() {
-  yum list extras "$@"
-}
-
-yum_Rs() {
-  if [[ "$_TOPT" == "" ]]; then
-    yum erase "$@"
-  else
-    _not_implemented
-  fi
-}
-
-yum_R() {
-  yum erase "$@"
-}
-
-yum_Si() {
-  yum info "$@"
-}
-
-yum_Suy() {
-  yum update "$@"
-}
-
-yum_Su() {
-  yum update "$@"
-}
-
-yum_Sy() {
-  yum check-update "$@"
-}
-
-yum_Ss() {
-  yum -C search "$@"
-}
-
-yum_Sc() {
-  yum clean expire-cache "$@"
-}
-
-yum_Scc() {
-  yum clean packages "$@"
-}
-
-yum_Sccc() {
-  yum clean all "$@"
-}
-
-yum_S() {
-  yum install $_TOPT "$@"
-}
-
-yum_U() {
-  yum localinstall "$@"
-}
-
-yum_Sii() {
-  yum resolvedep "$@"
-}
-
-
-_zypper_init() {
-  :
-}
-
-zypper_Qc() {
-  rpm -q --changelog "$@"
-}
-
-zypper_Qi() {
-  zypper info "$@"
-}
-
-zypper_Ql() {
-  rpm -ql "$@"
-}
-
-zypper_Qu() {
-  zypper list-updates "$@"
-}
-
-zypper_Qm() {
-  zypper search -si "$@" \
-  | grep 'System Packages'
-}
-
-zypper_Qo() {
-  rpm -qf "$@"
-}
-
-zypper_Qp() {
-  rpm -qip "$@"
-}
-
-zypper_Qs() {
-  zypper search --installed-only "$@"
-}
-
-zypper_Q() {
-  if [[ "$_TOPT" == "q" ]]; then
-    zypper search -i "$@" \
-    | grep ^i \
-    | awk '{print $3}'
-  elif [[ "$_TOPT" == "" ]]; then
-    zypper search -i "$@"
-  else
-    _not_implemented
-  fi
-}
-
-zypper_Rs() {
-  if [[ "$_TOPT" == "s" ]]; then
-    zypper remove "$@" --clean-deps
-  else
-    _not_implemented
-  fi
-}
-
-zypper_R() {
-  zypper remove "$@"
-}
-
-zypper_Rn() {
-  # Remove configuration files
-  while read -r file; do
-    if [[ -f "$file" ]]; then
-      rm -fv "$file"
-    fi
-  done < <(rpm -ql "$@")
-
-  # Now remove the package per-se
-  zypper remove "$@"
-}
-
-zypper_Rns() {
-  # Remove configuration files
-  while read -r file; do
-    if [[ -f "$file" ]]; then
-      rm -fv "$file"
-    fi
-  done < <(rpm -ql "$@")
-
-  zypper remove "$@" --clean-deps
-}
-
-zypper_Suy() {
-  zypper dup "$@"
-}
-
-zypper_Sy() {
-  zypper refresh "$@"
-}
-
-zypper_Sl() {
-  if [[ $# -eq 0 ]]; then
-    zypper pa -R
-  else
-    zypper pa -r "$@"
-  fi
-}
-
-zypper_Ss() {
-  zypper search "$@"
-}
-
-zypper_Su() {
-  zypper --no-refresh dup "$@"
-}
-
-zypper_Sc() {
-  zypper clean "$@"
-}
-
-zypper_Scc() {
-  zypper clean "$@"
-}
-
-zypper_Sccc() {
-  # No way to do this in zypper
-  _not_implemented
-}
-
-zypper_Si() {
-  zypper info --requires "$@"
-}
-
-zypper_Sii() {
-  # Ugly and slow, but does the trick
-  local packages=
-
-  packages="$(zypper pa -R | cut -d \| -f 3 | tr -s '\n' ' ')"
-  for package in $packages; do
-    zypper info --requires "$package" \
-    | grep -q "$@" && echo "$package"
-  done
-}
-
-zypper_S() {
-  zypper install $_TOPT "$@"
-}
-
-zypper_Sw() {
-  zypper install --download-only "$@"
-}
-
-zypper_U() {
-  zypper install "$@"
-}
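Editor's note: zypper_Sii above brute-forces "which installed packages require X" by running zypper info --requires over every package and grepping the output. The same pattern, abstracted (both helpers are hypothetical stand-ins, not zypper subcommands):

    # Print every package whose requirements mention "$1".
    reverse_requires() {
      local needle="$1" pkg
      list_installed_packages | while read -r pkg; do
        requires_of "$pkg" | grep -q -- "$needle" && printf '%s\n' "$pkg"
      done
    }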
"cave_Sccc") ;; - "cave_S") ;; - "cave_U") ;; - "conda_Q") ;; - "conda_R") ;; - "conda_S") ;; - "conda_Sc") ;; - "conda_Si") ;; - "conda_Ss") ;; - "conda_Suy") ;; - "dnf_S") ;; - "dnf_Sc") ;; - "dnf_Scc") ;; - "dnf_Sccc") ;; - "dnf_Si") ;; - "dnf_Sg") ;; - "dnf_Sl") ;; - "dnf_Ss") ;; - "dnf_Su") ;; - "dnf_Suy") ;; - "dnf_Sw") ;; - "dnf_Sy") ;; - "dnf_Q") ;; - "dnf_Qc") ;; - "dnf_Qe") ;; - "dnf_Qi") ;; - "dnf_Ql") ;; - "dnf_Qm") ;; - "dnf_Qo") ;; - "dnf_Qp") ;; - "dnf_Qs") ;; - "dnf_Qu") ;; - "dnf_R") ;; - "dnf_U") ;; - "dpkg_Q") ;; - "dpkg_Qi") ;; - "dpkg_Ql") ;; - "dpkg_Qo") ;; - "dpkg_Qp") ;; - "dpkg_Qu") ;; - "dpkg_Qs") ;; - "dpkg_Rs") ;; - "dpkg_Rn") ;; - "dpkg_Rns") ;; - "dpkg_R") ;; - "dpkg_Si") ;; - "dpkg_Suy") ;; - "dpkg_Su") ;; - "dpkg_Sy") ;; - "dpkg_Ss") ;; - "dpkg_Sc") ;; - "dpkg_Scc") ;; - "dpkg_S") ;; - "dpkg_U") ;; - "dpkg_Sii") ;; - "dpkg_Sccc") ;; - "homebrew_Qi") ;; - "homebrew_Ql") ;; - "homebrew_Qo") ;; - "homebrew_Qc") ;; - "homebrew_Qu") ;; - "homebrew_Qs") ;; - "homebrew_Q") ;; - "homebrew_Rs") ;; - "homebrew_R") ;; - "homebrew_Si") ;; - "homebrew_Suy") ;; - "homebrew_Su") ;; - "homebrew_Sy") ;; - "homebrew_Ss") ;; - "homebrew_Sc") ;; - "homebrew_Scc") ;; - "homebrew_Sccc") ;; - "homebrew_S") ;; - "macports_Ql") ;; - "macports_Qo") ;; - "macports_Qc") ;; - "macports_Qu") ;; - "macports_Rs") ;; - "macports_R") ;; - "macports_Si") ;; - "macports_Suy") ;; - "macports_Su") ;; - "macports_Sy") ;; - "macports_Ss") ;; - "macports_Sc") ;; - "macports_Scc") ;; - "macports_S") ;; - "pkgng_Qi") ;; - "pkgng_Ql") ;; - "pkgng_Qo") ;; - "pkgng_Qp") ;; - "pkgng_Qu") ;; - "pkgng_Q") ;; - "pkgng_Rs") ;; - "pkgng_R") ;; - "pkgng_Si") ;; - "pkgng_Suy") ;; - "pkgng_Su") ;; - "pkgng_Sy") ;; - "pkgng_Ss") ;; - "pkgng_Sc") ;; - "pkgng_Scc") ;; - "pkgng_S") ;; - "pkg_tools_Qi") ;; - "pkg_tools_Ql") ;; - "pkg_tools_Qo") ;; - "pkg_tools_Qp") ;; - "pkg_tools_Qu") ;; - "pkg_tools_Q") ;; - "pkg_tools_Rs") ;; - "pkg_tools_Rn") ;; - "pkg_tools_Rns") ;; - "pkg_tools_R") ;; - "pkg_tools_Si") ;; - "pkg_tools_Sl") ;; - "pkg_tools_Suy") ;; - "pkg_tools_Su") ;; - "pkg_tools_Sy") ;; - "pkg_tools_Ss") ;; - "pkg_tools_Sc") ;; - "pkg_tools_Scc") ;; - "pkg_tools_S") ;; - "portage_Qi") ;; - "portage_Ql") ;; - "portage_Qo") ;; - "portage_Qc") ;; - "portage_Qu") ;; - "portage_Q") ;; - "portage_Rs") ;; - "portage_R") ;; - "portage_Si") ;; - "portage_Suy") ;; - "portage_Su") ;; - "portage_Sy") ;; - "portage_Ss") ;; - "portage_Sc") ;; - "portage_Scc") ;; - "portage_Sccc") ;; - "portage_S") ;; - "sun_tools_Qi") ;; - "sun_tools_Ql") ;; - "sun_tools_Qo") ;; - "sun_tools_Qs") ;; - "sun_tools_Q") ;; - "sun_tools_R") ;; - "sun_tools_U") ;; - "swupd_Qk") ;; - "swupd_Qo") ;; - "swupd_Qs") ;; - "swupd_R") ;; - "swupd_Suy") ;; - "swupd_Su") ;; - "swupd_Sy") ;; - "swupd_Ss") ;; - "swupd_S") ;; - "tazpkg_Q") ;; - "tazpkg_Qi") ;; - "tazpkg_Ql") ;; - "tazpkg_Sy") ;; - "tazpkg_Su") ;; - "tazpkg_Suy") ;; - "tazpkg_S") ;; - "tazpkg_R") ;; - "tazpkg_Sc") ;; - "tazpkg_Scc") ;; - "tazpkg_Ss") ;; - "tazpkg_Qo") ;; - "tazpkg_U") ;; - "tlmgr_Qi") ;; - "tlmgr_Qk") ;; - "tlmgr_Ql") ;; - "tlmgr_R") ;; - "tlmgr_S") ;; - "tlmgr_Si") ;; - "tlmgr_Sl") ;; - "tlmgr_Ss") ;; - "tlmgr_Suy") ;; - "tlmgr_U") ;; - "yum_Q") ;; - "yum_Qi") ;; - "yum_Qs") ;; - "yum_Ql") ;; - "yum_Qo") ;; - "yum_Qp") ;; - "yum_Qc") ;; - "yum_Qu") ;; - "yum_Qm") ;; - "yum_Rs") ;; - "yum_R") ;; - "yum_Si") ;; - "yum_Suy") ;; - "yum_Su") ;; - "yum_Sy") ;; - "yum_Ss") ;; - "yum_Sc") ;; - "yum_Scc") ;; - "yum_Sccc") ;; - "yum_S") ;; - "yum_U") ;; - "yum_Sii") ;; - "zypper_Qc") 
;; - "zypper_Qi") ;; - "zypper_Ql") ;; - "zypper_Qu") ;; - "zypper_Qm") ;; - "zypper_Qo") ;; - "zypper_Qp") ;; - "zypper_Qs") ;; - "zypper_Q") ;; - "zypper_Rs") ;; - "zypper_R") ;; - "zypper_Rn") ;; - "zypper_Rs") ;; - "zypper_Rns") ;; - "zypper_Suy") ;; - "zypper_Sy") ;; - "zypper_Sl") ;; - "zypper_Ss") ;; - "zypper_Su") ;; - "zypper_Sc") ;; - "zypper_Scc") ;; - "zypper_Sccc") ;; - "zypper_Si") ;; - "zypper_Sii") ;; - "zypper_S") ;; - "zypper_Sw") ;; - "zypper_U") ;; - *) return 1 ;; - esac -} - - - -set -u -unset GREP_OPTIONS - -: "${PACAPT_DEBUG=}" # Show what will be going -: "${GREP:=grep}" # Need to update in, e.g, _sun_tools_init -: "${AWK:=awk}" # Need to update in, e.g, _sun_tools_init - -_sun_tools_init # Dirty tricky patch for SunOS - -export PACAPT_DEBUG GREP AWK - -_POPT="" # primary operation -_SOPT="" # secondary operation -_TOPT="" # options for operations -_EOPT="" # extra options (directly given to package manager) - # these options will be translated by (_translate_all) method. -_PACMAN="" # name of the package manager - -_PACMAN_detect \ -|| _die "'pacapt' doesn't support your package manager." - -if [[ -z "$PACAPT_DEBUG" ]]; then - [[ "$_PACMAN" != "pacman" ]] \ - || exec "/usr/bin/pacman" "$@" -elif [[ "$PACAPT_DEBUG" != "auto" ]]; then - _PACMAN="$PACAPT_DEBUG" -fi - -case "${1:-}" in -"update") shift; set -- -Sy "$@" ;; -"upgrade") shift; set -- -Su "$@" ;; -"install") shift; set -- -S "$@" ;; -"search") shift; set -- -Ss "$@" ;; -"remove") shift; set -- -R "$@" ;; -"autoremove") shift; set -- -Rs "$@" ;; -"clean") shift; set -- -Scc "$@" ;; -esac - -while :; do - _args="${1-}" - - [[ "${_args:0:1}" == "-" ]] || break - - case "${_args}" in - "--help") - _help - exit 0 - ;; - - "--noconfirm") - shift - _EOPT="$_EOPT:noconfirm:" - continue - ;; - - "-"|"--") - shift - break - ;; - esac - - i=1 - while [[ "$i" -lt "${#_args}" ]]; do - _opt="${_args:$i:1}" - (( i ++ )) - - case "$_opt" in - h) - _help - exit 0 - ;; - V) - _print_pacapt_version; - exit 0 - ;; - P) - _print_supported_operations "$_PACMAN" - exit 0 - ;; - - Q|S|R|U) - if [[ -n "$_POPT" && "$_POPT" != "$_opt" ]]; then - _error "Only one operation may be used at a time" - exit 1 - fi - _POPT="$_opt" - ;; - - # Comment 2015 May 26th: This part deals with the 2nd option. - # Most of the time, there is only one 2nd option. But some - # operation may need extra and/or duplicate (e.g, Sy <> Syy). - # - # See also - # - # * https://github.com/icy/pacapt/issues/13 - # - # This implementation works, but with a bug. #Rsn works - # but #Rns is translated to #Rn (incorrectly.) - # Thanks Huy-Ngo for this nice catch. - # - # FIXME: Please check pacman(8) to see if they are really 2nd operation - # - e|g|i|l|m|n|o|p|s) - if [[ "$_SOPT" == '' ]]; then - _SOPT="$_opt" - continue - fi - - # Understand it: - # If there is already an option recorded, the incoming option - # will come and compare itself with known one. - # We have a table - # - # known one vs. incoming ? | result - # < | one-new - # = | one-one - # > | new-one - # - # Let's say, after this step, the 3rd option comes (named X), - # and the current result is "a-b". We have a table - # - # a(b) vs. X | result - # < | aX (b dropped) - # = | aa (b dropped) - # > | Xa (b dropped) - # - # In any case, the first one matters. 
- # - if [[ "${_SOPT:0:1}" < "$_opt" ]]; then - _SOPT="${_SOPT:0:1}$_opt" - elif [[ "${_SOPT:0:1}" == "$_opt" ]]; then - _SOPT="$_opt$_opt" - else - _SOPT="$_opt${_SOPT:0:1}" - fi - - ;; - - q) - _TOPT="$_opt" ;; # Thanks to James Pearson - - u) - if [[ "${_SOPT:0:1}" == "y" ]]; then - _SOPT="uy" - else - _SOPT="u" - fi - ;; - - y) - if [[ "${_SOPT:0:1}" == "u" ]]; then - _SOPT="uy" - else - _SOPT="y" - fi - ;; - - c) - if [[ "${_SOPT:0:2}" == "cc" ]]; then - _SOPT="ccc" - elif [[ "${_SOPT:0:1}" == "c" ]]; then - _SOPT="cc" - else - _SOPT="$_opt" - fi - ;; - - w|v) - _EOPT="$_EOPT:$_opt:" - ;; - - *) - # FIXME: If option is unknown, we will break the loop - # FIXME: and this option will be used by the native program. - # FIXME: break 2 - _die "pacapt: Unknown option '$_opt'." - ;; - esac - done - - shift - - # If the primary option and the secondary are known - # we would break the argument detection, but for sure we will look - # forward to see there is anything interesting... - if [[ -n "$_POPT" && -n "$_SOPT" ]]; then - case "${1:-}" in - "-w"|"--noconfirm") ;; - *) break;; - esac - - # Don't have anything from the **first** argument. Something wrong. - # FIXME: This means that user must enter at least primary action - # FIXME: or secondary action in the very first part... - elif [[ -z "${_POPT}${_SOPT}${_TOPT}" ]]; then - break - fi -done - -[[ -n "$_POPT" ]] \ -|| _die "Usage: pacapt # -h for help, -P list supported functions" - -_validate_operation "${_PACMAN}_${_POPT}${_SOPT}" \ -|| { - _not_implemented - exit 1 -} - -_translate_all || exit - -if [[ -n "$*" ]]; then - case "${_POPT}${_SOPT}" in - "Su"|"Sy"|"Suy") - echo 1>&2 "WARNING ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" - echo 1>&2 " The -Sy/u options refresh and/or upgrade all packages." 
- echo 1>&2 " To install packages as well, use separate commands:" - echo 1>&2 - echo 1>&2 " $0 -S$_SOPT; $0 -S ${*}" - echo 1>&2 "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" - esac -fi - -if [[ -n "$PACAPT_DEBUG" ]]; then - echo "pacapt: $_PACMAN, p=$_POPT, s=$_SOPT, t=$_TOPT, e=$_EOPT" - echo "pacapt: execute '${_PACMAN}_${_POPT}${_SOPT} $_EOPT ${*}'" - declare -f "${_PACMAN}_${_POPT}${_SOPT}" -else - "_${_PACMAN}_init" || exit - "${_PACMAN}_${_POPT}${_SOPT}" $_EOPT "$@" -fi diff --git a/bitbucket-pipelines.yml b/bitbucket-pipelines.yml index a63c197..408b078 100644 --- a/bitbucket-pipelines.yml +++ b/bitbucket-pipelines.yml @@ -1,6 +1,6 @@ --- -image: atlassian/default-image:2 +image: debian:buster options: size: 2x @@ -14,7 +14,7 @@ pipelines: - step: name: Pre Parallelization stage script: - - echo "Running tests in 28 batches" + - echo "Running tests in 30 batches" - step: name: Check if number of batches match actual number of scenarios script: @@ -32,8 +32,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 1 - step: @@ -41,8 +40,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 2 - step: @@ -50,8 +48,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 3 - step: @@ -59,8 +56,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 4 - step: @@ -68,8 +64,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 5 - step: @@ -77,8 +72,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 6 - step: @@ -86,8 +80,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 7 - step: @@ -95,8 +88,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 8 - step: @@ -104,8 +96,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 9 - step: @@ -113,8 +104,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - apt-get update && ./bin/install-ansible --dev - ./bin/run-tests-in-batches --batch 10 - step: @@ -122,8 +112,7 @@ pipelines: services: - docker script: - - apt-get update && apt-get install -y virtualenv python-dev - - ./bin/install-ansible + - 
diff --git a/bitbucket-pipelines.yml b/bitbucket-pipelines.yml
index a63c197..408b078 100644
--- a/bitbucket-pipelines.yml
+++ b/bitbucket-pipelines.yml
@@ -1,6 +1,6 @@
 ---
-image: atlassian/default-image:2
+image: debian:buster
 
 options:
   size: 2x
@@ -14,7 +14,7 @@ pipelines:
     - step:
         name: Pre Parallelization stage
        script:
-          - echo "Running tests in 28 batches"
+          - echo "Running tests in 30 batches"
     - step:
         name: Check if number of batches match actual number of scenarios
         script:
@@ -32,8 +32,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 1
 
    - step:
@@ -41,8 +40,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 2
 
    - step:
@@ -50,8 +48,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 3
 
    - step:
@@ -59,8 +56,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 4
 
    - step:
@@ -68,8 +64,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 5
 
    - step:
@@ -77,8 +72,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 6
 
    - step:
@@ -86,8 +80,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 7
 
    - step:
@@ -95,8 +88,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 8
 
    - step:
@@ -104,8 +96,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 9
 
    - step:
@@ -113,8 +104,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 10
 
    - step:
@@ -122,8 +112,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 11
 
    - step:
@@ -131,8 +120,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 12
 
    - step:
@@ -140,8 +128,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 13
 
    - step:
@@ -149,8 +136,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 14
 
    - step:
@@ -158,8 +144,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 15
 
    - step:
@@ -167,8 +152,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 16
 
    - step:
@@ -176,8 +160,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 17
 
    - step:
@@ -185,8 +168,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 18
 
    - step:
@@ -194,8 +176,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 19
 
    - step:
@@ -203,8 +184,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 20
 
    - step:
@@ -212,8 +192,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 21
 
    - step:
@@ -221,8 +200,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 22
 
    - step:
@@ -230,8 +208,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 23
 
    - step:
@@ -239,8 +216,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 24
 
    - step:
@@ -248,8 +224,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 25
 
    - step:
@@ -257,8 +232,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 26
 
    - step:
@@ -266,8 +240,7 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 27
 
    - step:
@@ -275,8 +248,23 @@ pipelines:
        services:
          - docker
        script:
-          - apt-get update && apt-get install -y virtualenv python-dev
-          - ./bin/install-ansible
+          - apt-get update && ./bin/install-ansible --dev
          - ./bin/run-tests-in-batches --batch 28
+
+    - step:
+        name: Molecule Test Batch - 29
+        services:
+          - docker
+        script:
+          - apt-get update && ./bin/install-ansible --dev
+          - ./bin/run-tests-in-batches --batch 29
+
+    - step:
+        name: Molecule Test Batch - 30
+        services:
+          - docker
+        script:
+          - apt-get update && ./bin/install-ansible --dev
+          - ./bin/run-tests-in-batches --batch 30
+
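Editor's note: thirty near-identical steps differ only in their batch number, which is why the repo generates this file (pipeline_generator/pipeline.py, below). For intuition, a throwaway shell rendering of the same repetition (illustrative only; the real generator is the Jinja2 template that follows):

    for batch in $(seq 1 30); do
      printf '    - step:\n        name: Molecule Test Batch - %s\n' "$batch"
      printf '        services:\n          - docker\n        script:\n'
      printf '          - apt-get update && ./bin/install-ansible --dev\n'
      printf '          - ./bin/run-tests-in-batches --batch %s\n\n' "$batch"
    done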
diff --git a/group_vars/aws_node_local.yml b/group_vars/aws_node_local.yml
index cd6ea3b..bb380b9 100644
--- a/group_vars/aws_node_local.yml
+++ b/group_vars/aws_node_local.yml
@@ -45,6 +45,10 @@ atl_product_log_locations:
     - "{{ atl_product_home }}/log"
   crowd: []
 
+atl_provisioner_log_locations:
+  - "/var/log/ansible-bootstrap.log"
+  - "/var/log/cfn-*.log"
+
 # The following are imports from the environment. These are generally
 # set in /etc/atl by the CloudFormation template and sourced before
 # Ansible is run. See bin/ansible-with-atl-env for a convenient wrapper
@@ -126,3 +130,6 @@ atl_rds_instance_class: "{{ lookup('env', 'ATL_RDS_INSTANCE_CLASS') }}"
 atl_rds_multi_az: "{{ lookup('env', 'ATL_RDS_MULTI_AZ') }}"
 atl_rds_subnet_group_name: "{{ lookup('env', 'ATL_RDS_SUBNET_GROUP_NAME') }}"
 atl_rds_security_group: "{{ lookup('env', 'ATL_RDS_SECURITY_GROUP') }}"
+
+atl_backup_manifest_url: "{{ lookup('env', 'ATL_BACKUP_MANIFEST_URL') }}"
+atl_restore_required: "{{ atl_backup_manifest_url is defined and atl_backup_manifest_url != '' }}"
diff --git a/pipeline_generator/Makefile b/pipeline_generator/Makefile
index 5b6e164..aed9280 100644
--- a/pipeline_generator/Makefile
+++ b/pipeline_generator/Makefile
@@ -1,2 +1,2 @@
 generate-pipeline:
-	@python pipeline.py
+	@python3 pipeline.py
diff --git a/pipeline_generator/pipeline.py b/pipeline_generator/pipeline.py
index 62b1408..fd9edbb 100644
--- a/pipeline_generator/pipeline.py
+++ b/pipeline_generator/pipeline.py
@@ -46,14 +46,13 @@ class Step:
 
 
 class ScriptCommand:
-    INSTALL_PACKAGES_COMMAND = "apt-get update && apt-get install -y virtualenv python-dev"
-    INSTALL_ANSIBLE_COMMAND = "./bin/install-ansible"
+    PACKAGE_INSTALL_COMMAND = "apt-get update && ./bin/install-ansible --dev"
 
     def __init__(self, test_command):
         self.test_command = test_command
 
     def all_commands(self):
-        return [self.INSTALL_PACKAGES_COMMAND, self.INSTALL_ANSIBLE_COMMAND, self.test_command]
+        return [self.PACKAGE_INSTALL_COMMAND, self.test_command]
 
 
 def main():
diff --git a/pipeline_generator/templates/bitbucket-pipelines.yml.j2 b/pipeline_generator/templates/bitbucket-pipelines.yml.j2
index 9b7fe42..6b89e62 100644
--- a/pipeline_generator/templates/bitbucket-pipelines.yml.j2
+++ b/pipeline_generator/templates/bitbucket-pipelines.yml.j2
@@ -1,6 +1,6 @@
 ---
-image: atlassian/default-image:2
+image: debian:buster
 
 options:
   size: 2x
@@ -36,4 +36,4 @@
         {% for scriptCommand in parallel_step.scriptCommands -%}
           - {{ scriptCommand }}
         {% endfor %}
-    {% endfor %}
\ No newline at end of file
+    {% endfor %}
diff --git a/roles/aws_common/defaults/main.yml b/roles/aws_common/defaults/main.yml
index 3cb3b65..d43b4c4 100644
--- a/roles/aws_common/defaults/main.yml
+++ b/roles/aws_common/defaults/main.yml
@@ -11,3 +11,4 @@ atl_aws_enable_cloudwatch_logs: false
 atl_aws_agent_restart: true
 
 atl_aws_log_group: "{{ atl_product_edition }}-{{ atl_aws_stack_name }}"
+atl_aws_provisioning_log_group: "{{ atl_aws_log_group }}-provisioning"
diff --git a/roles/aws_common/molecule/default/tests/test_default.py b/roles/aws_common/molecule/default/tests/test_default.py
index 53261bb..f605623 100644
--- a/roles/aws_common/molecule/default/tests/test_default.py
+++ b/roles/aws_common/molecule/default/tests/test_default.py
@@ -20,6 +20,7 @@ def test_package_exes(host, exe):
 def test_service_file(host):
     f = host.file('/opt/aws/amazon-cloudwatch-agent/etc/amazon-cloudwatch-agent.json')
     assert f.contains('"log_group_name": "jira-software-MY_STACK"')
+    assert f.contains('"log_group_name": "jira-software-MY_STACK-provisioning"')
     assert f.user == 'root'
     assert f.group == 'root'
     assert f.mode == 0o0644
diff --git a/roles/aws_common/templates/amazon-cloudwatch-agent.json.j2 b/roles/aws_common/templates/amazon-cloudwatch-agent.json.j2
index 17b31ca..81caac2 100644
--- a/roles/aws_common/templates/amazon-cloudwatch-agent.json.j2
+++ b/roles/aws_common/templates/amazon-cloudwatch-agent.json.j2
@@ -19,7 +19,14 @@
             "log_stream_name": "{instance_id}"
           }
           {% endfor %}
-
+          {% for path in atl_provisioner_log_locations %}
+          {{ comma() }}
+          {
+            "file_path": "{{ path }}",
+            "log_group_name": "{{ atl_aws_provisioning_log_group }}",
+            "log_stream_name": "{instance_id}"
+          }
+          {% endfor %}
         ]
       }
     }
diff --git a/roles/crowd_config/tasks/main.yml b/roles/crowd_config/tasks/main.yml
index ef7dcc2..b5b987c 100644
--- a/roles/crowd_config/tasks/main.yml
+++ b/roles/crowd_config/tasks/main.yml
@@ -5,16 +5,19 @@
     src: server.xml.j2
     dest: "{{ atl_product_installation_versioned }}/apache-tomcat/conf/server.xml"
 
-- name: Override JVM memory settings.
-  # Ugly but necessary as the product installs this file so we need to make the change here.
+- name: Set the minimum heap size (Xms)
   lineinfile:
     path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh"
-    backrefs: true
-    regexp: "^{{ item }}="
-    line: "{{ item }}=\"{{ atl_jvm_heap }}\""
-  with_items:
-    - 'JVM_MINIMUM_MEMORY'
-    - 'JVM_MAXIMUM_MEMORY'
+    regexp: '^(.*)Xms(\d+\w)(\s.*)$'
+    line: '\1Xms{{ atl_jvm_heap }}\3'
+    backrefs: yes
+
+- name: Set the maximum heap size (Xmx)
+  lineinfile:
+    path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh"
+    regexp: '^(.*)Xmx(\d+\w)(\s.*)$'
+    line: '\1Xmx{{ atl_jvm_heap }}\3'
+    backrefs: yes
 
 - name: Set Crowd home directory in crowd-init.properties file
   lineinfile:
@@ -27,7 +30,13 @@
     insertafter: "EOF"
     line: 'export CATALINA_OPTS="${CATALINA_OPTS} {{ atl_catalina_opts }} {{ atl_catalina_opts_extra }}"'
 
-- name: Set JAVA_HOME #FIXME
+- name: Set the Crowd node name via CATALINA_OPTS
+  lineinfile:
+    path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh"
+    insertafter: "EOF"
+    line: export CATALINA_OPTS="${CATALINA_OPTS} -Dcluster.node.name={{ ansible_ec2_instance_id }}-{{ ansible_ec2_local_ipv4 }}"
+
+- name: Set JAVA_HOME
   lineinfile:
     path: "{{ atl_product_installation_versioned }}/apache-tomcat/bin/setenv.sh"
     insertafter: "EOF"
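Editor's note: the two backrefs tasks above rewrite whatever heap size is already present in setenv.sh instead of replacing whole lines, so surrounding JVM flags survive. Roughly the same edit with GNU sed (a sketch; it assumes one -Xms/-Xmx token each, followed by whitespace, as in the shipped setenv.sh):

    heap="2g"
    sed -E -i \
      -e "s/^(.*)Xms[0-9]+[kKmMgG]([[:space:]].*)$/\1Xms${heap}\2/" \
      -e "s/^(.*)Xmx[0-9]+[kKmMgG]([[:space:]].*)$/\1Xmx${heap}\2/" \
      setenv.sh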
diff --git a/roles/database_init/tasks/main.yml b/roles/database_init/tasks/main.yml
index 99638f7..8827f99 100644
--- a/roles/database_init/tasks/main.yml
+++ b/roles/database_init/tasks/main.yml
@@ -1,40 +1,40 @@
 ---
-- name: Create application DB user
-  postgresql_user:
-    login_host: "{{ atl_db_host }}"
-    login_user: "{{ atl_db_root_user }}"
-    login_password: "{{ atl_db_root_password }}"
-    port: "{{ atl_db_port }}"
-    name: "{{ atl_jdbc_user }}"
-    password: "{{ atl_jdbc_password }}"
-    expires: 'infinity'
-  tags:
-    - new_only
+- block:
 
-- name: Update root privs for new user
-  postgresql_privs:
-    login_host: "{{ atl_db_host }}"
-    login_user: "{{ atl_db_root_user }}"
-    login_password: "{{ atl_db_root_password }}"
-    database: postgres
-    roles: "{{ atl_db_root_user }}"
-    objs: "{{ atl_jdbc_user }}"
-    type: group
-  tags:
-    - new_only
+  - name: Create application DB user
+    postgresql_user:
+      login_host: "{{ atl_db_host }}"
+      login_user: "{{ atl_db_root_user }}"
+      login_password: "{{ atl_db_root_password }}"
+      port: "{{ atl_db_port }}"
+      name: "{{ atl_jdbc_user }}"
+      password: "{{ atl_jdbc_password }}"
+      expires: 'infinity'
+
+  - name: Update root privs for new user
+    postgresql_privs:
+      login_host: "{{ atl_db_host }}"
+      login_user: "{{ atl_db_root_user }}"
+      login_password: "{{ atl_db_root_password }}"
+      database: postgres
+      roles: "{{ atl_db_root_user }}"
+      objs: "{{ atl_jdbc_user }}"
+      type: group
+
+  - name: Create new application database
+    postgresql_db:
+      login_host: "{{ atl_db_host }}"
+      login_user: "{{ atl_db_root_user }}"
+      login_password: "{{ atl_db_root_password }}"
+      port: "{{ atl_db_port }}"
+      name: "{{ atl_jdbc_db_name }}"
+      owner: "{{ atl_jdbc_user }}"
+      encoding: "{{ atl_jdbc_encoding }}"
+      lc_collate: "{{ atl_jdbc_collation }}"
+      lc_ctype: "{{ atl_jdbc_ctype }}"
+      template: "{{ atl_jdbc_template }}"
+    register: db_created
 
-- name: Create application database
-  postgresql_db:
-    login_host: "{{ atl_db_host }}"
-    login_user: "{{ atl_db_root_user }}"
-    login_password: "{{ atl_db_root_password }}"
-    port: "{{ atl_db_port }}"
-    name: "{{ atl_jdbc_db_name }}"
-    owner: "{{ atl_jdbc_user }}"
-    encoding: "{{ atl_jdbc_encoding }}"
-    lc_collate: "{{ atl_jdbc_collation }}"
-    lc_ctype: "{{ atl_jdbc_ctype }}"
-    template: "{{ atl_jdbc_template }}"
   tags:
     - new_only
diff --git a/roles/linux_common/defaults/main.yml b/roles/linux_common/defaults/main.yml
index 561baf2..cb751b1 100644
--- a/roles/linux_common/defaults/main.yml
+++ b/roles/linux_common/defaults/main.yml
@@ -1,3 +1,4 @@
 ---
 
 atl_product_user_uid: '2001'
+git_version: "2.14.4"
diff --git a/roles/nfs_server/tasks/ubuntu.yml b/roles/nfs_server/tasks/ubuntu.yml
index 5bb5dcb..becb1d8 100644
--- a/roles/nfs_server/tasks/ubuntu.yml
+++ b/roles/nfs_server/tasks/ubuntu.yml
@@ -1,7 +1,7 @@
 ---
 
 - name: Install Ubuntu-specific NFS packages
-  yum:
+  apt:
     name:
       - nfs-kernel-server
      - libnfs-utils
diff --git a/roles/product_common/defaults/main.yml b/roles/product_common/defaults/main.yml
index cb807b0..ce62ba4 100644
--- a/roles/product_common/defaults/main.yml
+++ b/roles/product_common/defaults/main.yml
@@ -2,7 +2,6 @@
 java_version: "1.8.0"
 java_major_version: "8"
 postgres_version: "9.6"
-git_version: "2.14.4"
 
 # Disable these when using the product installer, otherwise we end up
 # fighting with it.
diff --git a/roles/product_install/defaults/main.yml b/roles/product_install/defaults/main.yml
index 4b62635..67b01a6 100644
--- a/roles/product_install/defaults/main.yml
+++ b/roles/product_install/defaults/main.yml
@@ -19,9 +19,14 @@ atl_product_base_url: "{{ atl_release_base_url }}/{{ atl_product_family }}/downl
 atl_product_download_url: "{{ atl_product_base_url }}/atlassian-{{ atl_download_edition | default(atl_product_edition) }}-{{ atl_product_version }}{{ atl_download_suffix }}"
 atl_product_download_filename: "{{ atl_download_edition | default(atl_product_edition) }}.{{ atl_product_version }}{{ atl_download_suffix }}"
 
-atl_product_download: "{{ atl_installer_temp }}/{{ atl_product_download_filename }}"
+atl_product_temp_download: "{{ atl_installer_temp }}/{{ atl_product_download_filename }}"
 atl_product_varfile: "{{ atl_installer_temp }}/{{ atl_product_family }}.varfile"
 
+atl_product_home_shared_download_dir: "{{ atl_product_home_shared }}/downloads"
+atl_product_home_shared_download: "{{ atl_product_home_shared_download_dir }}/{{ atl_product_download_filename }}"
+atl_product_home_shared_moving_lock: "{{ atl_product_home_shared_download }}_moving"
+atl_product_home_shared_completed_lock: "{{ atl_product_home_shared_download }}_completed"
+
 atl_marketplace_base: "https://marketplace.atlassian.com"
 atl_mpac_products: "https://marketplace.atlassian.com/rest/2/products"
 atl_servicedesk_latest_url: "https://marketplace.atlassian.com/rest/2/products/key/jira-servicedesk/versions/latest"
diff --git a/roles/product_install/molecule/bitbucket_latest/tests/test_default.py b/roles/product_install/molecule/bitbucket_latest/tests/test_default.py
index 55c71c6..bbd851d 100644
--- a/roles/product_install/molecule/bitbucket_latest/tests/test_default.py
+++ b/roles/product_install/molecule/bitbucket_latest/tests/test_default.py
@@ -37,6 +37,16 @@ def test_latest_is_downloaded(host):
     upstream_json = json.load(upstream_fd)
     upstream = upstream_json['version']
 
-    installer = host.file('/opt/atlassian/tmp/bitbucket.' + upstream + '-x64.bin')
+    installer = host.file('/media/atl/bitbucket/shared/downloads/bitbucket.' + upstream + '-x64.bin')
     assert installer.exists
     assert installer.user == 'root'
+
+def test_completed_lockfile(host):
+    upstream_fd = urllib.request.urlopen(
+        "https://marketplace.atlassian.com/rest/2/applications/bitbucket/versions/latest")
+    upstream_json = json.load(upstream_fd)
+    upstream = upstream_json['version']
+
+    lockfile = host.file('/media/atl/bitbucket/shared/downloads/bitbucket.' + upstream + '-x64.bin_completed')
+    assert lockfile.exists
+    assert lockfile.user == 'root'
diff --git a/roles/product_install/molecule/confluence_latest/tests/test_default.py b/roles/product_install/molecule/confluence_latest/tests/test_default.py
index 47245a4..7ec072b 100644
--- a/roles/product_install/molecule/confluence_latest/tests/test_default.py
+++ b/roles/product_install/molecule/confluence_latest/tests/test_default.py
@@ -35,6 +35,15 @@ def test_latest_is_downloaded(host):
     upstream_json = json.load(upstream_fd)
     upstream = upstream_json['version']
 
-    installer = host.file('/opt/atlassian/tmp/confluence.'+upstream+'-x64.bin')
+    installer = host.file('/media/atl/confluence/shared-home/downloads/confluence.'+upstream+'-x64.bin')
     assert installer.exists
     assert installer.user == 'root'
+
+def test_completed_lockfile(host):
+    upstream_fd = urllib.request.urlopen("https://marketplace.atlassian.com/rest/2/applications/confluence/versions/latest")
+    upstream_json = json.load(upstream_fd)
+    upstream = upstream_json['version']
+
+    lockfile = host.file('/media/atl/confluence/shared-home/downloads/confluence.'+upstream+'-x64.bin_completed')
+    assert lockfile.exists
+    assert lockfile.user == 'root'
diff --git a/roles/product_install/molecule/crowd_latest/Dockerfile.j2 b/roles/product_install/molecule/crowd_latest/Dockerfile.j2
new file mode 100644
index 0000000..e6aa95d
--- /dev/null
+++ b/roles/product_install/molecule/crowd_latest/Dockerfile.j2
@@ -0,0 +1,14 @@
+# Molecule managed
+
+{% if item.registry is defined %}
+FROM {{ item.registry.url }}/{{ item.image }}
+{% else %}
+FROM {{ item.image }}
+{% endif %}
+
+RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
+    elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
+    elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
+    elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
+    elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \
+    elif [ $(command -v xbps-install) ]; then xbps-install -Syu && xbps-install -y python sudo bash ca-certificates && xbps-remove -O; fi
diff --git a/roles/product_install/molecule/crowd_latest/molecule.yml b/roles/product_install/molecule/crowd_latest/molecule.yml
new file mode 100644
index 0000000..7fd3163
--- /dev/null
+++ b/roles/product_install/molecule/crowd_latest/molecule.yml
@@ -0,0 +1,30 @@
+---
+dependency:
+  name: galaxy
+driver:
+  name: docker
+lint:
+  name: yamllint
+platforms:
+  - name: amazon_linux2
+    image: amazonlinux:2
+    groups:
+      - aws_node_local
+  - name: ubuntu_lts
+    image: ubuntu:bionic
+    groups:
+      - aws_node_local
+provisioner:
+  name: ansible
+  options:
+    skip-tags: runtime_pkg
+  lint:
+    name: ansible-lint
+  inventory:
+    links:
+      group_vars: ../../../../group_vars/
+verifier:
+  name: testinfra
+  lint:
+    name: flake8
+  enabled: false
diff --git a/roles/product_install/molecule/crowd_latest/playbook.yml b/roles/product_install/molecule/crowd_latest/playbook.yml
new file mode 100644
index 0000000..490514e
--- /dev/null
+++ b/roles/product_install/molecule/crowd_latest/playbook.yml
@@ -0,0 +1,12 @@
+---
+- name: Converge
+  hosts: all
+  vars:
+    atl_product_family: "crowd"
+    atl_product_edition: "crowd"
atl_product_user: "crowd" + atl_download_format: "tarball" + roles: + - role: linux_common + - role: product_common + - role: product_install diff --git a/roles/product_install/molecule/crowd_latest/tests/test_default.py b/roles/product_install/molecule/crowd_latest/tests/test_default.py new file mode 100644 index 0000000..b75a0b5 --- /dev/null +++ b/roles/product_install/molecule/crowd_latest/tests/test_default.py @@ -0,0 +1,52 @@ +import os +from six.moves import urllib +import json + +import testinfra.utils.ansible_runner + +testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( + os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all') + +def test_version_downloaded(host): + verfile = host.file('/media/atl/crowd/shared/crowd.version') + assert verfile.exists + +def test_symlink_created(host): + target = host.file('/opt/atlassian/crowd/current') + assert target.exists + assert target.is_symlink + +def test_unpacked(host): + verfile = host.file('/opt/atlassian/crowd/current/start_crowd.sh') + assert verfile.exists + +def test_version_file_is_latest(host): + verfile = host.file('/media/atl/crowd/shared/crowd.version') + assert verfile.exists + + upstream_fd = urllib.request.urlopen( + "https://marketplace.atlassian.com/rest/2/applications/crowd/versions/latest") + upstream_json = json.load(upstream_fd) + upstream = upstream_json['version'] + + assert verfile.content.decode("UTF-8").strip() == upstream.strip() + +def test_latest_is_downloaded(host): + upstream_fd = urllib.request.urlopen( + "https://marketplace.atlassian.com/rest/2/applications/crowd/versions/latest") + upstream_json = json.load(upstream_fd) + upstream = upstream_json['version'] + + installer = host.file('/media/atl/crowd/shared/downloads/crowd.' + upstream + '.tar.gz') + assert installer.exists + assert installer.user == 'root' + +def test_completed_lockfile(host): + upstream_fd = urllib.request.urlopen( + "https://marketplace.atlassian.com/rest/2/applications/crowd/versions/latest") + upstream_json = json.load(upstream_fd) + upstream = upstream_json['version'] + + lockfile = host.file('/media/atl/crowd/shared/downloads/crowd.' 
+    assert lockfile.exists
+    assert lockfile.user == 'root'
diff --git a/roles/product_install/molecule/default/tests/test_default.py b/roles/product_install/molecule/default/tests/test_default.py
index 70839b9..930ab59 100644
--- a/roles/product_install/molecule/default/tests/test_default.py
+++ b/roles/product_install/molecule/default/tests/test_default.py
@@ -23,6 +23,15 @@ def test_latest_is_downloaded(host):
     upstream_json = json.load(upstream_fd)
     upstream = upstream_json['version']
 
-    installer = host.file('/opt/atlassian/tmp/jira-core.'+upstream+'-x64.bin')
+    installer = host.file('/media/atl/jira/shared/downloads/jira-core.'+upstream+'-x64.bin')
     assert installer.exists
     assert installer.user == 'root'
+
+def test_completed_lockfile(host):
+    upstream_fd = urllib.request.urlopen("https://marketplace.atlassian.com/rest/2/applications/jira/versions/latest")
+    upstream_json = json.load(upstream_fd)
+    upstream = upstream_json['version']
+
+    lockfile = host.file('/media/atl/jira/shared/downloads/jira-core.'+upstream+'-x64.bin_completed')
+    assert lockfile.exists
+    assert lockfile.user == 'root'
\ No newline at end of file
diff --git a/roles/product_install/molecule/jira_cached_with_downgrade/tests/test_default.py b/roles/product_install/molecule/jira_cached_with_downgrade/tests/test_default.py
index 05a6bb3..788c3de 100644
--- a/roles/product_install/molecule/jira_cached_with_downgrade/tests/test_default.py
+++ b/roles/product_install/molecule/jira_cached_with_downgrade/tests/test_default.py
@@ -14,10 +14,15 @@ def test_version_is_correct(host):
     assert verfile.content.decode("UTF-8").strip() == "7.10.2"
 
 def test_is_downloaded(host):
-    installer = host.file('/opt/atlassian/tmp/jira-core.7.10.2-x64.bin')
+    installer = host.file('/media/atl/jira/shared/downloads/jira-core.7.10.2-x64.bin')
     assert installer.exists
     assert installer.user == 'root'
 
+def test_completed_lockfile(host):
+    lockfile = host.file('/media/atl/jira/shared/downloads/jira-core.7.10.2-x64.bin_completed')
+    assert lockfile.exists
+    assert lockfile.user == 'root'
+
 def test_is_unpacked(host):
     installer = host.file('/opt/atlassian/jira-core/7.10.2/atlassian-jira/')
     assert installer.exists
diff --git a/roles/product_install/molecule/jira_cached_with_upgrade/tests/test_default.py b/roles/product_install/molecule/jira_cached_with_upgrade/tests/test_default.py
index ead6adf..0818e1b 100644
--- a/roles/product_install/molecule/jira_cached_with_upgrade/tests/test_default.py
+++ b/roles/product_install/molecule/jira_cached_with_upgrade/tests/test_default.py
@@ -14,10 +14,15 @@ def test_version_is_correct(host):
     assert verfile.content.decode("UTF-8").strip() == "7.10.1"
 
 def test_is_downloaded(host):
-    installer = host.file('/opt/atlassian/tmp/jira-core.7.10.1-x64.bin')
+    installer = host.file('/media/atl/jira/shared/downloads/jira-core.7.10.1-x64.bin')
     assert installer.exists
     assert installer.user == 'root'
 
+def test_completed_lockfile(host):
+    lockfile = host.file('/media/atl/jira/shared/downloads/jira-core.7.10.1-x64.bin_completed')
+    assert lockfile.exists
+    assert lockfile.user == 'root'
+
 def test_is_unpacked(host):
     installer = host.file('/opt/atlassian/jira-core/7.10.1/atlassian-jira/')
     assert installer.exists
diff --git a/roles/product_install/molecule/jira_software_latest/tests/test_default.py b/roles/product_install/molecule/jira_software_latest/tests/test_default.py
index 63451c8..de1dca3 100644
--- a/roles/product_install/molecule/jira_software_latest/tests/test_default.py
+++ b/roles/product_install/molecule/jira_software_latest/tests/test_default.py
@@ -35,6 +35,15 @@ def test_latest_is_downloaded(host):
     upstream_json = json.load(upstream_fd)
     upstream = upstream_json['version']
 
-    installer = host.file('/opt/atlassian/tmp/jira-software.'+upstream+'-x64.bin')
+    installer = host.file('/media/atl/jira/shared/downloads/jira-software.'+upstream+'-x64.bin')
     assert installer.exists
     assert installer.user == 'root'
+
+def test_completed_lockfile(host):
+    upstream_fd = urllib.request.urlopen("https://marketplace.atlassian.com/rest/2/applications/jira/versions/latest")
+    upstream_json = json.load(upstream_fd)
+    upstream = upstream_json['version']
+
+    lockfile = host.file('/media/atl/jira/shared/downloads/jira-software.'+upstream+'-x64.bin_completed')
+    assert lockfile.exists
+    assert lockfile.user == 'root'
\ No newline at end of file
diff --git a/roles/product_install/molecule/jira_tarball/tests/test_default.py b/roles/product_install/molecule/jira_tarball/tests/test_default.py
index 2f5d09b..11a7438 100644
--- a/roles/product_install/molecule/jira_tarball/tests/test_default.py
+++ b/roles/product_install/molecule/jira_tarball/tests/test_default.py
@@ -23,6 +23,15 @@ def test_latest_is_downloaded(host):
     upstream_json = json.load(upstream_fd)
     upstream = upstream_json['version']
 
-    installer = host.file('/opt/atlassian/tmp/jira-core.'+upstream+'.tar.gz')
+    installer = host.file('/media/atl/jira/shared/downloads/jira-core.'+upstream+'.tar.gz')
     assert installer.exists
     assert installer.user == 'root'
+
+def test_completed_lockfile(host):
+    upstream_fd = urllib.request.urlopen("https://marketplace.atlassian.com/rest/2/applications/jira/versions/latest")
+    upstream_json = json.load(upstream_fd)
+    upstream = upstream_json['version']
+
+    lockfile = host.file('/media/atl/jira/shared/downloads/jira-core.'+upstream+'.tar.gz_completed')
+    assert lockfile.exists
+    assert lockfile.user == 'root'
\ No newline at end of file
diff --git a/roles/product_install/molecule/jira_version_from_file/tests/test_default.py b/roles/product_install/molecule/jira_version_from_file/tests/test_default.py
index 5f00577..b8a1966 100644
--- a/roles/product_install/molecule/jira_version_from_file/tests/test_default.py
+++ b/roles/product_install/molecule/jira_version_from_file/tests/test_default.py
@@ -14,10 +14,15 @@ def test_version_is_correct(host):
     assert verfile.content.decode("UTF-8").strip() == "7.9.0"
 
 def test_is_downloaded(host):
-    installer = host.file('/opt/atlassian/tmp/jira-core.7.9.0-x64.bin')
+    installer = host.file('/media/atl/jira/shared/downloads/jira-core.7.9.0-x64.bin')
     assert installer.exists
     assert installer.user == 'root'
 
+def test_completed_lockfile(host):
+    lockfile = host.file('/media/atl/jira/shared/downloads/jira-core.7.9.0-x64.bin_completed')
+    assert lockfile.exists
+    assert lockfile.user == 'root'
+
 def test_is_unpacked(host):
     installer = host.file('/opt/atlassian/jira-core/7.9.0/atlassian-jira/')
     assert installer.exists
diff --git a/roles/product_install/molecule/jira_version_latest/tests/test_default.py b/roles/product_install/molecule/jira_version_latest/tests/test_default.py
index 70839b9..930ab59 100644
--- a/roles/product_install/molecule/jira_version_latest/tests/test_default.py
+++ b/roles/product_install/molecule/jira_version_latest/tests/test_default.py
@@ -23,6 +23,15 @@ def test_latest_is_downloaded(host):
     upstream_json = json.load(upstream_fd)
     upstream = upstream_json['version']
 
-    installer = host.file('/opt/atlassian/tmp/jira-core.'+upstream+'-x64.bin')
+    installer = host.file('/media/atl/jira/shared/downloads/jira-core.'+upstream+'-x64.bin')
     assert installer.exists
     assert installer.user == 'root'
+
+def test_completed_lockfile(host):
+    upstream_fd = urllib.request.urlopen("https://marketplace.atlassian.com/rest/2/applications/jira/versions/latest")
+    upstream_json = json.load(upstream_fd)
+    upstream = upstream_json['version']
+
+    lockfile = host.file('/media/atl/jira/shared/downloads/jira-core.'+upstream+'-x64.bin_completed')
+    assert lockfile.exists
+    assert lockfile.user == 'root'
\ No newline at end of file
diff --git a/roles/product_install/molecule/jira_version_override/tests/test_default.py b/roles/product_install/molecule/jira_version_override/tests/test_default.py
index 8b5c7a4..3f16801 100644
--- a/roles/product_install/molecule/jira_version_override/tests/test_default.py
+++ b/roles/product_install/molecule/jira_version_override/tests/test_default.py
@@ -14,10 +14,15 @@ def test_version_is_correct(host):
     assert verfile.content.decode("UTF-8").strip() == "7.13.2"
 
 def test_is_downloaded(host):
-    installer = host.file('/opt/atlassian/tmp/jira-core.7.13.2-x64.bin')
+    installer = host.file('/media/atl/jira/shared/downloads/jira-core.7.13.2-x64.bin')
     assert installer.exists
     assert installer.user == 'root'
 
+def test_completed_lockfile(host):
+    lockfile = host.file('/media/atl/jira/shared/downloads/jira-core.7.13.2-x64.bin_completed')
+    assert lockfile.exists
+    assert lockfile.user == 'root'
+
 def test_is_unpacked(host):
     installer = host.file('/opt/atlassian/jira-core/7.13.2')
     assert installer.exists
diff --git a/roles/product_install/molecule/servicedesk3/tests/test_default.py b/roles/product_install/molecule/servicedesk3/tests/test_default.py
index 24afb62..5f50b6e 100644
--- a/roles/product_install/molecule/servicedesk3/tests/test_default.py
+++ b/roles/product_install/molecule/servicedesk3/tests/test_default.py
@@ -14,10 +14,15 @@ def test_version_is_correct(host):
     assert verfile.content.decode("UTF-8").strip() == "3.9.0"
 
 def test_is_downloaded(host):
-    installer = host.file('/opt/atlassian/tmp/servicedesk.3.9.0-x64.bin')
+    installer = host.file('/media/atl/jira/shared/downloads/servicedesk.3.9.0-x64.bin')
     assert installer.exists
     assert installer.user == 'root'
 
+def test_completed_lockfile(host):
+    lockfile = host.file('/media/atl/jira/shared/downloads/servicedesk.3.9.0-x64.bin_completed')
+    assert lockfile.exists
+    assert lockfile.user == 'root'
+
 def test_is_unpacked(host):
     installer = host.file('/opt/atlassian/jira-servicedesk/3.9.0')
     assert installer.exists
diff --git a/roles/product_install/molecule/servicedesk4/tests/test_default.py b/roles/product_install/molecule/servicedesk4/tests/test_default.py
index b660f23..5a22e8c 100644
--- a/roles/product_install/molecule/servicedesk4/tests/test_default.py
+++ b/roles/product_install/molecule/servicedesk4/tests/test_default.py
@@ -14,10 +14,15 @@ def test_version_is_correct(host):
     assert verfile.content.decode("UTF-8").strip() == "4.1.0"
 
 def test_is_downloaded(host):
-    installer = host.file('/opt/atlassian/tmp/servicedesk.4.1.0-x64.bin')
+    installer = host.file('/media/atl/jira/shared/downloads/servicedesk.4.1.0-x64.bin')
     assert installer.exists
     assert installer.user == 'root'
 
+def test_completed_lockfile(host):
+    lockfile = host.file('/media/atl/jira/shared/downloads/servicedesk.4.1.0-x64.bin_completed')
+    assert lockfile.exists
+    assert lockfile.user == 'root'
+
 def test_is_unpacked(host):
     installer = host.file('/opt/atlassian/jira-servicedesk/4.1.0')
     assert installer.exists
diff --git a/roles/product_install/molecule/servicedesk_latest/tests/test_default.py b/roles/product_install/molecule/servicedesk_latest/tests/test_default.py
index 2190295..cd975f3 100644
--- a/roles/product_install/molecule/servicedesk_latest/tests/test_default.py
+++ b/roles/product_install/molecule/servicedesk_latest/tests/test_default.py
@@ -23,10 +23,15 @@ def test_version_is_correct(host):
     assert verfile.content.decode("UTF-8").strip() == sd
 
 def test_is_downloaded(host):
-    installer = host.file('/opt/atlassian/tmp/servicedesk.'+sd+'-x64.bin')
+    installer = host.file('/media/atl/jira/shared/downloads/servicedesk.'+sd+'-x64.bin')
     assert installer.exists
     assert installer.user == 'root'
 
+def test_completed_lockfile(host):
+    lockfile = host.file('/media/atl/jira/shared/downloads/servicedesk.'+sd+'-x64.bin_completed')
+    assert lockfile.exists
+    assert lockfile.user == 'root'
+
 def test_is_unpacked(host):
     installer = host.file('/opt/atlassian/jira-servicedesk/'+sd)
     assert installer.exists
diff --git a/roles/product_install/tasks/main.yml b/roles/product_install/tasks/main.yml
index 6cea478..81cf0d0 100644
--- a/roles/product_install/tasks/main.yml
+++ b/roles/product_install/tasks/main.yml
@@ -116,6 +116,7 @@
     - "{{ atl_product_home }}"
     - "{{ atl_product_installation_versioned }}"
     - "{{ atl_product_version_cache_dir }}"
+    - "{{ atl_product_home_shared_download_dir }}"
   changed_when: false  # For Molecule idempotence check
 
 # At this point atl_product_version should be set, cache if necessary.
@@ -125,18 +126,120 @@
     dest: "{{ atl_product_version_cache }}"
     force: true
 
-# Note: We don't cache the binary in the shared drive due to the complexity
-# around download race-conditions if multiple nodes are starting at
-# the same time. Downloading from product-downloads.atlassian.com
-# (which is a CDN) takes seconds anyway.
-- name: Fetch product installer
-  get_url:
-    url: "{{ atl_product_download_url }}"
-    dest: "{{ atl_product_download }}"
-    mode: 0755
-    force: false
+# For the first run a temp binary should be downloaded but moved to
+# shared home to ensure all subsequent nodes have access
+# to the same specific version binary.
+# To prevent a race condition with multiple downloads at the same time,
+# a directory is used as a lockfile (atomic operation) when moving the binary.
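Editor's note: "atomic operation" above is load-bearing. mkdir either creates the directory and succeeds, or fails because it already exists, so exactly one node can take the lock even when several race; a file written with a plain redirect offers no such guarantee. In shell terms (a sketch, not the playbook):

    if mkdir /media/atl/shared/installer.bin_moving 2>/dev/null; then
      :  # we hold the lock; safe to copy the binary into shared home
    else
      :  # someone else is (or was) moving it; fall back to the temp copy
    fi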
+- name: Check for completed lock directory
+  stat:
+    path: "{{ atl_product_home_shared_completed_lock }}"
+  register: completed_lock
+
+- name: Check for product installer in home_shared
+  stat:
+    path: "{{ atl_product_home_shared_download }}"
+  register: home_shared_download
+
+# If the binary and the completed lock dir both exist, use that binary instead.
+- name: Check lock directory and binary exist on shared_home
+  set_fact:
+    download_binary: false
+    atl_product_download: "{{ atl_product_home_shared_download }}"
+  when:
+    - home_shared_download.stat.exists
+    - completed_lock.stat.isdir is defined
+    - completed_lock.stat.isdir
+
+# Fetch binary if required
+- name: Fetch the binary and stage it for shared home (download_binary is true)
+  block:
+
+    # Fetch binary and copy to temp
+    - name: Fetch binary
+      get_url:
+        url: "{{ atl_product_download_url }}"
+        dest: "{{ atl_product_temp_download }}"
+        mode: 0755
+        force: false
+      register: atl_product_completed
+
+    # If the product installer was fetched, make the lock directory
+    - name: Create moving_lock
+      file:
+        path: "{{ atl_product_home_shared_moving_lock }}"
+        state: directory
+      when:
+        - atl_product_completed is succeeded
+      register: moving_lock_created
+
+    # Was the directory lock created by this run?
+    # If so, set a fact intending to move the binary.
+    - name: Move binary scenario - lock created by this run
+      set_fact:
+        move_binary: true
+      when:
+        - moving_lock_created is succeeded
+        - moving_lock_created.changed
+    # Otherwise the directory lock either already existed or could not
+    # be created. The fallback is to continue and install from temp.
+
+  when: download_binary
+
+# If the intention is to move the binary to home_shared
+- name: Move product installer to home_shared
+  block:
+
+    - name: Copy temp installer to home_shared
+      copy:
+        src: "{{ atl_product_temp_download }}"
+        dest: "{{ atl_product_home_shared_download }}"
+        remote_src: true
+      when:
+        - moving_lock_created is succeeded
+        - moving_lock_created.changed
+      register: copied
+
+    - name: Create completed_lock once product installer downloaded and copied
+      file:
+        path: "{{ atl_product_home_shared_completed_lock }}"
+        state: directory
+      when: copied is succeeded
+      register: completed_lock_created
+
+    - name: Remove moving_lock to show that the binary is complete
+      file:
+        path: "{{ atl_product_home_shared_moving_lock }}"
+        state: absent
+      when:
+        - completed_lock_created is succeeded
+        - copied is succeeded
+      register: moving_lock_removed
+
+    - name: Delete old temp installer
+      file:
+        path: "{{ atl_product_temp_download }}"
+        state: absent
+      when: moving_lock_removed is succeeded
+      register: temp_deleted
+
+    - name: Set install to home_shared location
+      set_fact:
+        atl_product_download: "{{ atl_product_home_shared_download }}"
+      when: temp_deleted is succeeded
+
+  when: move_binary
+
+# At this point the binary is at {{ atl_product_download }}
+# (either on home_shared or on temp).
 
 - name: Unpack the downloaded application depending on format
   include_tasks: "unpack_{{ atl_download_format }}.yml"
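Reviewer note: the new task flow above is a publish-once protocol that relies on directory creation being atomic on the shared filesystem. A minimal Python sketch of the same protocol, with hypothetical paths and names (an illustration, not part of the role):

    import os
    import shutil
    import tempfile
    import urllib.request

    # Hypothetical stand-ins for the role's atl_product_* variables.
    SHARED_BIN = "/media/atl/product/shared/downloads/installer.bin"
    MOVING_LOCK = SHARED_BIN + "_moving"        # held while a node copies
    COMPLETED_LOCK = SHARED_BIN + "_completed"  # present once the copy is done

    def fetch_installer(url):
        # Later nodes: reuse the shared binary once the completed marker exists.
        if os.path.isdir(COMPLETED_LOCK) and os.path.exists(SHARED_BIN):
            return SHARED_BIN
        tmp = os.path.join(tempfile.gettempdir(), "installer.bin")
        urllib.request.urlretrieve(url, tmp)
        try:
            # mkdir is atomic, so exactly one node wins the right to publish.
            os.mkdir(MOVING_LOCK)
        except FileExistsError:
            return tmp  # another node is publishing; install from temp
        shutil.copy(tmp, SHARED_BIN)
        os.mkdir(COMPLETED_LOCK)  # signal that the copy is complete
        os.rmdir(MOVING_LOCK)
        os.remove(tmp)
        return SHARED_BIN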
+# The variable {{ atl_product_download }} points at the temp location
+# on the first node and at shared_home on subsequent nodes.
 - name: Run the installer
   command: /bin/sh "{{ atl_product_download }}" -q -varfile "{{ atl_product_varfile }}"
   args:
diff --git a/roles/product_startup/defaults/main.yml b/roles/product_startup/defaults/main.yml
index ca1eda5..45d6f8a 100644
--- a/roles/product_startup/defaults/main.yml
+++ b/roles/product_startup/defaults/main.yml
@@ -14,5 +14,6 @@ atl_startup_exec_path: "{{ atl_product_installation_current }}/{{ atl_startup_sc
 atl_startup_exec_options: ["-fg"]
 atl_startup_systemd_params: []
-
 atl_systemd_service_name: "{{ atl_product_edition }}.service"
+
+atl_systemd_service_target: "multi-user.target"
diff --git a/roles/product_startup/templates/product.service.j2 b/roles/product_startup/templates/product.service.j2
index 6b5077f..8310e88 100644
--- a/roles/product_startup/templates/product.service.j2
+++ b/roles/product_startup/templates/product.service.j2
@@ -15,4 +15,4 @@ ExecStart={{ atl_startup_exec_path }}{% for c in atl_startup_exec_options %} {{ 
 Restart=on-failure
 
 [Install]
-WantedBy=multi-target.target
+WantedBy={{ atl_systemd_service_target }}
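The template change above fixes a real bug: `multi-target.target` is not a systemd unit, so `systemctl enable` had nothing valid to hook the service into. With the shipped default for `atl_systemd_service_target`, the [Install] section now renders as:

    [Install]
    WantedBy=multi-user.target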
diff --git a/roles/restore_backups/.yamllint b/roles/restore_backups/.yamllint
new file mode 100644
index 0000000..a87f8ff
--- /dev/null
+++ b/roles/restore_backups/.yamllint
@@ -0,0 +1,12 @@
+extends: default
+
+rules:
+  braces:
+    max-spaces-inside: 1
+    level: error
+  brackets:
+    max-spaces-inside: 1
+    level: error
+  line-length: disable
+  truthy: disable
+  trailing-spaces: false
diff --git a/roles/restore_backups/defaults/main.yml b/roles/restore_backups/defaults/main.yml
new file mode 100644
index 0000000..6561c7e
--- /dev/null
+++ b/roles/restore_backups/defaults/main.yml
@@ -0,0 +1,4 @@
+---
+
+atl_backup_home_restore_canary_filename: ".slingshot_home_restore"
+atl_backup_home_restore_canary_path: "{{ atl_product_home_shared }}/{{ atl_backup_home_restore_canary_filename }}"
diff --git a/roles/restore_backups/molecule/default/Dockerfile.j2 b/roles/restore_backups/molecule/default/Dockerfile.j2
new file mode 100644
index 0000000..e6aa95d
--- /dev/null
+++ b/roles/restore_backups/molecule/default/Dockerfile.j2
@@ -0,0 +1,14 @@
+# Molecule managed
+
+{% if item.registry is defined %}
+FROM {{ item.registry.url }}/{{ item.image }}
+{% else %}
+FROM {{ item.image }}
+{% endif %}
+
+RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
+    elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install python sudo python-devel python*-dnf bash && dnf clean all; \
+    elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl bash && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
+    elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml && zypper clean -a; \
+    elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates; \
+    elif [ $(command -v xbps-install) ]; then xbps-install -Syu && xbps-install -y python sudo bash ca-certificates && xbps-remove -O; fi
diff --git a/roles/restore_backups/molecule/default/molecule.yml b/roles/restore_backups/molecule/default/molecule.yml
new file mode 100644
index 0000000..7f082f6
--- /dev/null
+++ b/roles/restore_backups/molecule/default/molecule.yml
@@ -0,0 +1,36 @@
+---
+dependency:
+  name: galaxy
+driver:
+  name: docker
+lint:
+  name: yamllint
+platforms:
+  - name: amazon_linux2
+    image: amazonlinux:2
+    groups:
+      - aws_node_local
+    ulimits:
+      - nofile:262144:262144
+  - name: ubuntu_lts
+    image: ubuntu:bionic
+    groups:
+      - aws_node_local
+    ulimits:
+      - nofile:262144:262144
+provisioner:
+  name: ansible
+  options:
+    skip-tags: runtime_pkg
+  lint:
+    name: ansible-lint
+    options:
+      x: ["701"]
+  inventory:
+    links:
+      group_vars: ../../../../group_vars/
+verifier:
+  name: testinfra
+  lint:
+    name: flake8
+    enabled: false
diff --git a/roles/restore_backups/molecule/default/playbook.yml b/roles/restore_backups/molecule/default/playbook.yml
new file mode 100644
index 0000000..ffd0c12
--- /dev/null
+++ b/roles/restore_backups/molecule/default/playbook.yml
@@ -0,0 +1,10 @@
+---
+- name: Converge
+  hosts: all
+  vars:
+    atl_backup_manifest_url: ''
+    atl_backup_home_restore_canary_path: '/tmp/canary.tmp'
+
+  roles:
+    # Should be no-op
+    - role: restore_backups
diff --git a/roles/restore_backups/molecule/default/tests/test_default.py b/roles/restore_backups/molecule/default/tests/test_default.py
new file mode 100644
index 0000000..0a7276f
--- /dev/null
+++ b/roles/restore_backups/molecule/default/tests/test_default.py
@@ -0,0 +1,10 @@
+import os
+
+import testinfra.utils.ansible_runner
+
+testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
+    os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
+
+
+def test_no_canary_file(host):
+    assert not host.file('/tmp/canary.tmp').exists  # canary path set in playbook.yml vars
diff --git a/roles/restore_backups/tasks/amazon.yml b/roles/restore_backups/tasks/amazon.yml
new file mode 100644
index 0000000..bf32125
--- /dev/null
+++ b/roles/restore_backups/tasks/amazon.yml
@@ -0,0 +1,7 @@
+---
+
+# Amazon Linux 2 supplies extra packages via a special command.
+- name: Enable Postgresql from 'extras'
+  command: amazon-linux-extras install -y "postgresql{{ postgres_version }}"
+  args:
+    creates: /usr/bin/psql
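The `creates: /usr/bin/psql` argument is what keeps the `amazon-linux-extras` task idempotent: Ansible skips the command once that file exists. The same guard in plain Python, as a sketch (the version default below is illustrative only, not taken from the role):

    import os
    import subprocess

    def install_postgres(version="9.6"):  # illustrative default
        if os.path.exists("/usr/bin/psql"):
            return  # equivalent of the 'creates:' short-circuit
        subprocess.run(
            ["amazon-linux-extras", "install", "-y", f"postgresql{version}"],
            check=True,
        )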
diff --git a/roles/restore_backups/tasks/main.yml b/roles/restore_backups/tasks/main.yml
new file mode 100644
index 0000000..4d6865f
--- /dev/null
+++ b/roles/restore_backups/tasks/main.yml
@@ -0,0 +1,133 @@
+---
+
+# This role will attempt to fetch and load the backup manifest from a
+# remote S3 URL. On successful completion the contents of the JSON or
+# YAML document will be in the var `atl_backup_manifest`.
+#
+# PREREQUISITES:
+# * `atl_backup_manifest_url` points at the manifest.
+# * The shared home filesystem is mounted if necessary (e.g. NFS/EFS).
+# * The database has been created and the variable `db_created` is
+#   registered with the result (i.e. `register: db_created`).
+#
+# NOTE: The actual DB/FS restore operations could potentially be split
+# out into discrete roles, but currently that is not required.
+#
+# TODO: Support HTTPS with authentication. Deferred until after the
+# initial testing release.
+
+- block:
+
+    - name: Ensure temp directory is present
+      file:
+        path: "{{ atl_installer_temp }}"
+        state: directory
+        mode: 0750
+        owner: "{{ atl_product_user }}"
+        group: "{{ atl_product_user }}"
+      changed_when: false  # For Molecule idempotence check
+
+    - name: Parse the manifest URL
+      set_fact:
+        atl_backup_manifest_url: "{{ atl_backup_manifest_url | urlsplit }}"
+
+    - name: Extract manifest file information
+      set_fact:
+        atl_backup_manifest_bucket: "{{ atl_backup_manifest_url.hostname }}"
+        atl_backup_manifest_path: "{{ atl_backup_manifest_url.path }}"
+        atl_backup_manifest_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest_url.path | basename }}"
+
+    - name: Fetch the manifest from S3
+      aws_s3:
+        mode: get
+        overwrite: different
+        bucket: "{{ atl_backup_manifest_bucket }}"
+        object: "{{ atl_backup_manifest_path }}"
+        dest: "{{ atl_backup_manifest_dest }}"
+      when: atl_backup_manifest_url.scheme == 's3'
+
+    - name: Load parameters from manifest
+      include_vars:
+        file: "{{ atl_backup_manifest_dest }}"
+        name: atl_backup_manifest
+
+    - name: Define the DB and home dump destinations
+      set_fact:
+        # FIXME: The manifest format is still undecided so the
+        # following usages will need to be updated once it settles.
+        atl_backup_id: "{{ atl_backup_manifest.name }}"
+        atl_backup_db_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.artifacts.db.location.value | basename }}"
+        atl_backup_home_dest: "{{ atl_installer_temp }}/{{ atl_backup_manifest.artifacts.sharedHome.location.value | basename }}"
+
+    # FIXME: Here we fetch the backups. However we may wish to stream
+    # these directly from S3 to the target DB/FS to avoid requiring
+    # disk-space for the intermediate files.
+    - name: Fetch DB backup from S3
+      aws_s3:
+        mode: get
+        overwrite: different
+        bucket: "{{ atl_backup_manifest.artifacts.db.location.value | urlsplit('hostname') }}"
+        object: "{{ atl_backup_manifest.artifacts.db.location.value | urlsplit('path') }}"
+        dest: "{{ atl_backup_db_dest }}"
+
+    - name: Fetch Home backup from S3
+      aws_s3:
+        mode: get
+        overwrite: different
+        bucket: "{{ atl_backup_manifest.artifacts.sharedHome.location.value | urlsplit('hostname') }}"
+        object: "{{ atl_backup_manifest.artifacts.sharedHome.location.value | urlsplit('path') }}"
+        dest: "{{ atl_backup_home_dest }}"
+
+    - name: Install distro-specific restore support packages
+      include_tasks: "{{ ansible_distribution|lower }}.yml"
+
+    - name: Restore application database
+      postgresql_db:
+        login_host: "{{ atl_db_host }}"
+        login_user: "{{ atl_db_root_user }}"
+        login_password: "{{ atl_db_root_password }}"
+        port: "{{ atl_db_port }}"
+        name: "{{ atl_jdbc_db_name }}"
+        owner: "{{ atl_jdbc_user }}"
+        encoding: "{{ atl_jdbc_encoding }}"
+        lc_collate: "{{ atl_jdbc_collation }}"
+        lc_ctype: "{{ atl_jdbc_ctype }}"
+        template: "{{ atl_jdbc_template }}"
+        # Depends on fetch_backup roles
+        state: restore
+        target: "{{ atl_backup_db_dest }}"
+      when: db_created.changed and atl_backup_db_dest is defined
+
+    - name: Check for the restore canary file
+      stat:
+        path: "{{ atl_backup_home_restore_canary_path }}"
+      register: restore_canary
+
+    - block:
+
+        - name: Create shared home if necessary
+          file:
+            path: "{{ atl_product_home_shared }}"
+            state: directory
+            mode: 0750
+            owner: "{{ atl_product_user }}"
+            group: "{{ atl_product_user }}"
+
+        - name: Restore the shared-home backup
+          unarchive:
+            src: "{{ atl_backup_home_dest }}"
+            dest: "{{ atl_product_home_shared }}"
+            owner: "{{ atl_product_user }}"
+            group: "{{ atl_product_user }}"
+
+        - name: Create restore-canary if necessary
+          copy:
+            dest: "{{ atl_backup_home_restore_canary_path }}"
+            content: "{{ atl_backup_id }}"
+
+      when: not restore_canary.stat.exists
+
+  when: atl_restore_required
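Two patterns in this role are worth calling out. Ansible's `urlsplit` filter behaves like Python's `urllib.parse.urlsplit`, so for an `s3://bucket/key` URL the hostname is the bucket and the path is the object key; and the canary file turns the shared-home restore into a run-once operation keyed on the backup id. A compact sketch of both, with hypothetical values:

    import os
    from urllib.parse import urlsplit

    # Hypothetical manifest URL standing in for atl_backup_manifest_url.
    parts = urlsplit("s3://example-backups/manifests/backup-1.yml")
    bucket = parts.hostname                 # 'example-backups'
    key = parts.path                        # '/manifests/backup-1.yml'
    dest = "/tmp/" + os.path.basename(key)  # local download target

    # Run-once guard, like the restore canary (path mirrors the role default).
    canary = "/tmp/.slingshot_home_restore"
    if not os.path.exists(canary):
        # ... unpack the shared-home archive here ...
        with open(canary, "w") as f:
            f.write("backup-1")  # the role writes atl_backup_id here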