Compare commits
85 Commits
master...68edbd6451
Commits in this comparison (85, SHA1):

68edbd6451 cf3d842e1d 6309a62b79 cda80a5487 f0783701b2
ec023ca375 a8cf68f70e f0d5169e9e a629cb0286 e445708ed4
b02e6a2791 96ed5e47be ac09d79fa9 d0ae93751a cd3817dfb6
567e99ee0c 3a56d20104 646416dbf7 7ed9f5c2a0 39603cbb9b
6c2301d7cc ab52415f54 e333809b4a 7ac5a81774 b07650cc79
bc6d971aef 8bfb365dfd b0612af979 c20af0bdf7 bec4d2cac5
e7ffb1b56c e1aefa2527 800c7f062c 43370eb837 fd056ba0fa
e0fc6ab1fa 0016b318e2 1990413fbe ae0be16dd6 1e1d4d75a0
5a48dc5a61 0a7b67b6c5 39b2e4676e 2bda08fd2f f1639dce1e
5df550669a 38ce173ad5 088123b3ce 9970e17d2f d15779f99a
2a9f5fb965 448e2e4423 0b214f734c fd2989cd66 01c882d585
306eda9c3c 09892ddc3d 6a825e1dd7 bef40c64c6 cadb79cd26
58431d1d78 dc1395daf1 1f3ca79d04 33ef563375 2e34cf9c85
5c1d5a3a5c 6544f30114 182cdb20ae c59b9f54bb bb3578f997
2fa6554b9d 75c709a5a5 e81fc750cb 27dd062900 b98e464f07
d5fdbbb9aa c32fc17550 6b5df945de 23a6dfcc62 5cedb22b51
3dc96bbf4d 4ae14c54dc b3f9e8fc80 8f839a4944 62b95d4e22
**.gitignore** (vendored, 8 lines changed)

@@ -1,3 +1,7 @@
# ---> Ansible
*.retry

*.swp
playbooks/testing.yml
.mypy_cache/*
*.idea
**/__pycache__/
.venv/
**.gitmodules** (vendored, new file, 3 lines)

@@ -0,0 +1,3 @@
[submodule "vars/secrets"]
path = vars/secrets
url = git@vcs.enp.one:omni/omni-ansible-secrets.git
**.pre-commit-config.yaml** (new file, 28 lines)

@@ -0,0 +1,28 @@
---
# All of the pre-commit hooks here actually use the `python` pre-commit language
# setting. However, for the python language setting, pre-commit will create and manage
# a cached virtual environment for each hook ID and do a bare `pip install <repo>` into
# the venv to setup the hook. This can result in conflicting dependency versions between
# the version installed to the pre-commit venv and the version installed to the Poetry
# venv specified in the lockfile.
#
# The solution is to specify `language: system` for all hooks and then install the
# required dependencies to the Poetry venv. The `system` language skips the isolated
# venv creation and looks for the entrypoint specified by the hook in the global
# environment which, if running in the Poetry venv, will find the entrypoint provided
# by the Poetry-managed dependency.
#
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v3.3.0
    hooks:
      - id: end-of-file-fixer
        language: system
      - id: fix-encoding-pragma
        args:
          - "--remove"
        language: system
      - id: trailing-whitespace
        language: system
      - id: check-merge-conflict
        language: system
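Since every hook above uses `language: system`, the hook entrypoints must already be importable from the environment pre-commit runs in. A minimal setup sketch, assuming the hook dependencies are declared as dev dependencies in this repo's pyproject.toml (that dependency list is an assumption, it is not shown in this diff):

```bash
# Install the project and its dev dependencies into the Poetry-managed venv,
# then register the git hook from inside that venv so `language: system`
# resolves the hook entrypoints there.
poetry install
poetry run pre-commit install

# Optional sanity check: run every configured hook against the whole tree once.
poetry run pre-commit run --all-files
```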
**Pipfile** (deleted, 13 lines)

@@ -1,13 +0,0 @@
[[source]]
name = "pypi"
url = "https://pypi.org/simple"
verify_ssl = true

[dev-packages]

[packages]
ansible = "*"
paramiko = "*"

[requires]
python_version = "3.7"
**Pipfile.lock** (generated, deleted, 213 lines)

@@ -1,213 +0,0 @@
The deleted, Pipenv-generated lockfile (pipfile-spec 6, python_version "3.7", single pypi source at https://pypi.org/simple, `_meta` hash sha256 d6313730a0cb1941be53cf8e8e42bf51eb078f1edccefa3fc31fe38f7e36fcd2) pinned the following packages in its `default` section, each entry carrying its full list of sha256 wheel/sdist hashes; the `develop` section was empty.

| Package | Pinned version |
|---|---|
| ansible | ==2.8.4 |
| asn1crypto | ==0.24.0 |
| bcrypt | ==3.1.7 |
| cffi | ==1.12.3 |
| cryptography | ==2.7 |
| jinja2 | ==2.10.1 |
| markupsafe | ==1.1.1 |
| paramiko | ==2.6.0 |
| pycparser | ==2.19 |
| pynacl | ==1.3.0 |
| pyyaml | ==5.1.2 |
| six | ==1.12.0 |
**README.md** (40 lines changed)

@@ -1,3 +1,41 @@
# omni-ansible

Network Ansible configurations
Network Ansible configurations

* The `omni_*` prefix is used for custom variables defined and used internally to
  distinguish them from `ansible_*` or other variables. The `_runtime_` prefix should
  be used for runtime variables
* Passing `clean=true` should force cleaning any and all cached stuff
* Passing `update=true` should update any unpinned _things_ to their latest version

Organizational notes:

* Playbooks should be platform/device agnostic. Any playbook should be runnable against
  any device. If the config a playbook deploys isn't applicable to that device then the
  playbook should be laid out so that it skips any inapplicable hosts.
* Building from that, platform-conditionals should go in task files: `when` conditions
  in playbooks should be limited to runtime conditions.

Target notes:

* The `'mgmt'` target grants remote management access. This usually means SSH + local
  login access, but can also mean web interface (cockpit, erx, etc)

General workflow:

1. Run `provision.yml` - this gets the entire environment into a ready-to-go state but
   does not deploy any actual applications or perform any target tasks
2. Run one or more `deploy-*.yml` - this deploys the application noted to the system
3. Run one or more `do-*.yml` - this performs one off tasks

## local env creation

Requires Poetry-1.1+

```bash
git clone https://vcs.enp.one/omni/omni-ansible.git

cd omni-ansible/

poetry install
```
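As a concrete illustration of the workflow the new README describes, a run against the EN1 inventory might look like the sketch below; the specific inventory/playbook pairing is an assumption for the example, not something stated in this diff:

```bash
# One-time local setup (Poetry 1.1+), as described in the README
git clone https://vcs.enp.one/omni/omni-ansible.git
cd omni-ansible/
poetry install

# Step 1 of the workflow: bring the environment to a ready-to-go state
poetry run ansible-playbook --inventory en1.yml playbooks/provision-common.yml

# Step 2: deploy an application stack (see playbooks/deploy-compose.yml below)
poetry run ansible-playbook --inventory en1.yml playbooks/deploy-compose.yml
```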
**ansible.cfg** (new file, 8 lines)

@@ -0,0 +1,8 @@
[defaults]
host_key_checking = false

[ssh_connection]
ssh_args = "-C -o ControlMaster=auto -o ControlPersist=60s -o ForwardAgent=yes"

[inventory]
enable_plugins = yaml
**en1.yml** (new file, 87 lines)

@@ -0,0 +1,87 @@
---
all:
  vars:
    ansible_user: ansible
    ansible_python_interpreter: /opt/ansible/bin/python
    omni_ansible_venv: /opt/ansible
    update: false
    clean: false

  children:

    servers:
      vars:
        omni_local_hosts:
          - hostname: jupiter.svr.local
            ip: 192.168.42.10
          - hostname: remus.svr.local
            ip: 192.168.42.20
          - hostname: romulus.svr.local
            ip: 192.168.42.30
      hosts:
        jupiter:
          ansible_host: jupiter.net.enp.one
          omni_description: EN1 System Control Server
          omni_networking:
            eno1:
              dhcp: true
              dhcp_address: 10.42.101.10/42
            eno2:
              dhcp: false
              addresses: ["192.168.42.10/24"]
        remus:
          ansible_host: remus.net.enp.one
          omni_description: EN1 Hypervisor/Datastore
          omni_networking:
            eno1:
              dhcp: true
              dhcp_address: 10.42.101.20/24
            eno2:
              dhcp: false
              addresses: ["192.168.42.20/24"]
        romulus:
          ansible_host: romulus.net.enp.one
          omni_description: EN1 Hypervisor/Datastore
          omni_networking:
            eno1:
              dhcp: true
              dhcp_address: 10.42.101.30/24
            eno2:
              dhcp: false
              addresses: ["192.168.42.30/24"]
      children:
        virtualization: {}
        datastore: {}

    virtualization:
      hosts:
        jupiter:
          omni_docker_configs: /etc/omni/compose
          omni_docker_swarm_iface: eno2
      children:
        virtualization_worker:
          hosts:
            remus:
              omni_docker_swarm_iface: eno2
            romulus:
              omni_docker_swarm_iface: eno2

    datastore:
      children:
        datastore_arbiter:
          hosts:
            jupiter:
              omni_datastore_mount: /mnt/datastore
              omni_gluster_brick:
                mount: /mnt/brick0
                fs: xfs
        datastore_block:
          hosts:
            remus:
              omni_gluster_brick:
                mount: /mnt/brick0
                fs: xfs
            romulus:
              omni_gluster_brick:
                mount: /mnt/brick0
                fs: xfs
**en2.yml** (new file, 14 lines)

@@ -0,0 +1,14 @@
---
all:
  vars:
    ansible_user: ansible
    omni_protected_users: ["root", "ansible"]
  ungrouped:
    hosts:
      nimbus-1:
        ansible_host: en2.enp.one
        omni_description: EN2 Digitial Ocean Cloud Server
        omni_os:
          name: centos
          version: "7"
        omni_targets: ["admin", "network"]
@@ -1,8 +0,0 @@
---
ansible_user: ansible

disable_gnome_user_list: True

protected_users:
  - root
  - ansible

@@ -1,12 +0,0 @@
---
enable_gui: False

enable_ssh: True

enable_ssh_password_auth: False

disable_sudo_password: True

enable_networkd: True

generate_keys: False

@@ -1,12 +0,0 @@
---
enable_gui: False

enable_ssh: True

enable_ssh_password_auth: False

disable_sudo_password: False

enable_networkd: True

generate_keys: False

@@ -1,12 +0,0 @@
---
enable_gui: False

enable_ssh: True

enable_ssh_password_auth: False

disable_sudo_password: True

enable_networkd: True

generate_keys: False

@@ -1,12 +0,0 @@
---
enable_gui: True

enable_ssh: False

enable_ssh_password_auth: False

disable_sudo_password: False

enable_networkd: False

generate_keys: False

@@ -1,6 +0,0 @@
---
description: "EN1 Reverse Proxy / EN1 VPN Server"

targets:
  - admin
  - vpn

@@ -1,8 +0,0 @@
---
description: "Wandering excursion"

targets:
  - admin
  - workstations

ansible_python_interpreter: /usr/bin/python3

@@ -1,6 +0,0 @@
---
description: "EN2 Digitial Ocean Cloud Server"

targets:
  - admin
  - web

@@ -1,5 +0,0 @@
---
description: "EN1 Secondary Datastore"
targets:
  - admin
  - datastore

@@ -1,6 +0,0 @@
---
description: "And the Last"

targets:
  - admin
  - workstations

@@ -1,9 +0,0 @@
---
description: "EN1 Primary Datastore / EN1 Secondary Hypervisor"

targets:
  - admin
  - datastore

networking:
  ovirtmgt:

@@ -1,10 +0,0 @@
---
description: "EN1 Primary Hypervisor"

targets:
  - admin
  - datastore
  - hypervisor

networking:
  ovirtmgt:

@@ -1,33 +0,0 @@
---
description: EN1 Core Router

ansible_network_os: edgeos

targets:
  - admin
  - network

network:
  ethernet_eth0:
    address: dhcp
    description: UPLINK
    extra:
      - duplex auto
      - speed auto
  ethernet_eth1:
    address: 10.42.100.1/24
    description: PUBLIC
    extra:
      - duplex auto
      - speed auto
  ethernet_eth2:
    address: 10.42.101.1/24
    description: PRIVATE
    extra:
      - duplex auto
      - speed auto
  ethernet_eth2_vif_10:
    address: 10.42.102.1/24
    description: SECURE
    extra:
      - mtu 1500

@@ -1,8 +0,0 @@
---
description: "Smooth as Silk"

targets:
  - admin
  - workstations

ansible_python_interpreter: /usr/bin/python3

@@ -1,8 +0,0 @@
---
description: "Watcher who Watches the Watchmen"

targets:
  - admin
  - workstations

ansible_python_interpreter: /usr/bin/python3

@@ -1,5 +0,0 @@
---
description: "Database Host: MariaDB"

targets:
  - admin

@@ -1,5 +0,0 @@
---
description: "Database Host: MySQL"

targets:
  - admin

@@ -1,5 +0,0 @@
---
description: "Database Host: PrometheusDB"

targets:
  - admin

@@ -1,6 +0,0 @@
---
description: "Development Host: Nginx Web Server"

targets:
  - admin
  - web

@@ -1,9 +0,0 @@
---
description: "Application Host: Bitwarden"

targets:
  - admin
  - bitwarden

networking:
  eth0:

@@ -1,9 +0,0 @@
---
description: "Application Host: Gitea"

targets:
  - admin
  - gitea

networking:
  eth0:

@@ -1,9 +0,0 @@
---
description: "Application Host: Minecraft"

targets:
  - admin
  - minecraft

networking:
  eth0:

@@ -1,9 +0,0 @@
---
description: "Application Host: Nextcloud"

targets:
  - admin
  - nextcloud

networking:
  eth0:

@@ -1,6 +0,0 @@
---
description: "Application Host: Plex Media Server"

targets:
  - admin
  - plex
**omni.ini** (deleted, 25 lines)

@@ -1,25 +0,0 @@
[network]
router.net.enp.one

[servers]
romulus.net.enp.one
remus.net.enp.one
novis.tre2.local

[vms]
vm-db-mysql.net.enp.one
vm-dev-nginx.net.enp.one
vm-host-gitea.net.enp.one
vm-host-plex.net.enp.one
vm-host-bitwarden.net.enp.one
vm-host-nextcloud.net.enp.one
vm-host-minecraft.net.enp.one

[cloud]
nimbus-1.net.enp.one

[workstations]
omega
vigil-nox.tre2.local
serico-nox.tre2.local
inerro.tre2.local
**playbooks/configure-env.yml** (new file, 89 lines)

@@ -0,0 +1,89 @@
---
- name: Configure environment
  hosts: all
  tasks:
    - name: Set hostname
      become: true
      hostname:
        name: "{{ ansible_host }}"

    - import_tasks: tasks/sshd/banner.yml

    - name: Install global bash components
      become: true
      copy:
        src: bash/{{ item }}.sh
        dest: /etc/profile.d/Z-{{ 10 + loop_index }}-enpn-{{ item }}.sh
        mode: 0644
      loop:
        - global
        - pyenv
        - aliases
        - helpers
      loop_control:
        index_var: loop_index
        label: "{{ item }}"

    - name: Disable dynamic MOTD
      become: true
      replace:
        path: /etc/pam.d/sshd
        regexp: "^session\\s+optional\\s+pam_motd\\.so.*$"
        replace: "#session optional pam_motd.so"

    - name: Remove legacy global bashrc
      become: true
      file:
        path: /etc/profile.d/ZA-enpn-bashrc.sh
        state: absent

    - name: Disable case-sensitive autocomplete
      become: true
      lineinfile:
        path: /etc/inputrc
        line: set completion-ignore-case ((o|O)(n|ff))
        create: true
        mode: 0644

- name: Configure additional security settings on shared servers
  hosts: servers
  tasks:
    - name: Identify local home directories
      become: true
      find:
        file_type: directory
        path: /home/
        recurse: false
      register: _local_home_dirs

    - name: Determine files to write-protect
      set_fact:
        _secure_files: >-
          {{ _secure_files | default([]) + [
            item.path ~ '/.bashrc',
            item.path ~ '/.bash_profile',
            item.path ~ '/.ssh/authorized_keys',
            item.path ~ '/.ssh/config'
          ] }}
      loop: "{{ _local_home_dirs.files }}"
      loop_control:
        label: "{{ item.path }}"

    - name: Fetch status of secure files
      become: true
      stat:
        path: "{{ item }}"
      loop: "{{ _secure_files }}"
      loop_control:
        label: "{{ item }}"
      register: _secure_file_stats

    - name: Restrict access to secure files
      become: true
      file:
        path: "{{ item.item }}"
        state: "{{ 'file' if item.stat.exists else 'touch' }}"
        mode: 0400
      loop: "{{ _secure_file_stats.results }}"
      loop_control:
        label: "Write-protecting: {{ item.item }}"
**playbooks/configure-mgmt.yml** (new file, 164 lines)

@@ -0,0 +1,164 @@
---
- name: Configure server management services
  hosts: servers
  tasks:
    - import_tasks: tasks/sshd/secure.yml

    - name: Enable cockpit
      when: ansible_distribution == "CentOS" and ansible_distribution_major_version == "8"
      become: true
      systemd:
        name: cockpit.socket
        enabled: true
        state: started

- name: Configure virtualization management services
  hosts: virtualization
  tasks:
    - name: Create docker group
      become: true
      group:
        name: docker
        state: present

- name: Configure local accounts
  hosts: all
  vars_files:
    - vars/accounts.yml
    - vars/secrets/passwords.yml
    - vars/sshkeys.yml
  tasks:
    - name: Create omni group
      become: true
      group:
        name: "{{ omni_group.name }}"
        gid: "{{ omni_group.gid }}"
        state: present

    - name: Determine existing omni users
      changed_when: false
      shell:
        cmd: 'grep omni /etc/group | cut --delimiter : --fields 4 | tr "," "\n"'
      register: _existing_omni_users

    - name: Delete removed user accounts
      become: true
      when: item not in (omni_users | items2dict(key_name='name', value_name='uid'))
      user:
        name: "{{ item }}"
        state: absent
      loop: "{{ _existing_omni_users.stdout_lines }}"

    - name: Delete removed user groups
      become: true
      when: item not in (omni_users | items2dict(key_name='name', value_name='uid'))
      group:
        name: "{{ item }}"
        state: absent
      loop: "{{ _existing_omni_users.stdout_lines }}"

    - name: Delete removed user home directories
      become: true
      when: item not in (omni_users | items2dict(key_name='name', value_name='uid'))
      file:
        path: "/home/{{ item }}"
        state: absent
      loop: "{{ _existing_omni_users.stdout_lines }}"

    - name: Create account groups
      become: true
      group:
        name: "{{ item.name }}"
        gid: "{{ item.uid }}"
        state: present
      loop: "{{ omni_users }}"
      loop_control:
        label: "{{ item.uid }},{{ item.name }}"

    - name: Create accounts
      become: true
      user:
        name: "{{ item.name }}"
        state: present
        uid: "{{ item.uid }}"
        group: "{{ item.name }}"
        groups: >-
          {{
            [omni_group.name] +
            (['wheel' if ansible_os_family | lower == 'redhat' else 'sudo'] if item.admin | default(false) else []) +
            (['docker' if 'virtualization' in group_names else omni_group.name] if item.admin | default(false) else [])
          }}
        # The 'else omni_group.name' above is just some non-breaking value to cover the
        # false condition, it doesn't have special meaning
        comment: "{{ item.fullname | default('') }}"
        shell: "{{ '/bin/bash' if 'mgmt' in item.targets else '/bin/false' }}"
        system: "{{ item.svc | default(false) }}"
        generate_ssh_key: false
        password: "{{ omni_users_secrets[item.name] | default(none) }}"
      loop: "{{ omni_users }}"
      loop_control:
        label: "{{ item.uid }},{{ item.name }}"

    - name: Disable sudo password for ansible
      become: true
      lineinfile:
        create: true
        path: /etc/sudoers.d/30-ansible
        line: "ansible ALL=(ALL) NOPASSWD:ALL"
        mode: 0644

    - name: Ensure proper ownership of user home directories
      become: true
      file:
        path: /home/{{ item.name }}
        state: directory
        group: "{{ item.name }}"
        owner: "{{ item.name }}"
        mode: 0700
      loop: "{{ omni_users }}"
      loop_control:
        label: "{{ item.uid }},{{ item.name }}"

    - name: Enforce root password
      become: true
      user:
        name: root
        password: "{{ omni_users_secrets.root }}"
        state: present

    - name: Create SSH directory
      become: true
      file:
        path: /home/{{ item.name }}/.ssh
        owner: "{{ item.name }}"
        group: "{{ item.name }}"
        state: directory
        mode: 0755
      loop: "{{ omni_users }}"
      loop_control:
        label: "{{ item.uid }},{{ item.name }}"

    - name: Update authorized keys
      become: true
      when: "'mgmt' in item.targets"
      authorized_key:
        user: "{{ item.name }}"
        key: "{{ omni_ssh_keys[item.name] | join('\n') }}"
        state: present
        exclusive: true
      loop: "{{ omni_users }}"
      loop_control:
        label: "{{ item.uid }},{{ item.name }}"

    - name: Enforce ownership of authorized keys
      become: true
      when: "'mgmt' in item.targets"
      file:
        path: /home/{{ item.name }}/.ssh/authorized_keys
        state: file
        owner: "{{ item.name }}"
        group: "{{ item.name }}"
        mode: 0400
      loop: "{{ omni_users }}"
      loop_control:
        label: "{{ item.uid }},{{ item.name }}"
**playbooks/configure-network.yml** (new file, 34 lines)

@@ -0,0 +1,34 @@
---
# - name: Configure router
#   hosts: router
#   gather_facts: false
#   pre_tasks:
#     - name: Collect EdgeOS facts
#       edgeos_facts:
#         gather_subset: "!config"
#   tasks:
#     - name: Configure interfaces
#       edgeos_config:
#         lines:
#           - set interfaces ethernet eth0 address dhcp
#           - set interfaces ethernet eth0 description EXTERNAL
#           - set interfaces ethernet eth1 address 10.42.100.1/24
#           - set interfaces ethernet eth1 address 10.42.99.1/24
#           - set interfaces ethernet eth1 description LOCAL
#           - set interfaces ethernet eth2 address 10.42.101.1/24
#           - set interfaces ethernet eth2 description DOMAIN

- name: Configure server networking
  hosts: servers
  tasks:
    - import_tasks: tasks/networkd/install.yml
    - import_tasks: tasks/networkd/configure.yml
    - import_tasks: tasks/networkd/services.yml

    - name: Configure local hostsfile
      become: true
      lineinfile:
        path: /etc/hosts
        state: present
        line: "{{ item.ip }} {{ item.hostname }}"
      loop: "{{ omni_local_hosts | default([]) }}"
**playbooks/configure-webproxy.yml** (new file, 54 lines)

@@ -0,0 +1,54 @@
---
- import_playbook: initialize.yml


- name: Install Nginx
  hosts: jupiter
  handlers:
    - name: restart-nginx
      import_tasks: tasks/nginx/services.yml
  tasks:
    - import_tasks: tasks/nginx/install.yml

    - name: Set required SELinux options
      become: true
      seboolean:
        name: httpd_can_network_connect
        persistent: true
        state: true
      notify:
        - restart-nginx


- name: Configure Nginx
  hosts: jupiter
  vars_files:
    - vars/applications.yaml
  vars:
    _letsencrypt_cert_dir: /etc/letsencrypt/live
  handlers:
    - name: restart-nginx
      import_tasks: tasks/nginx/services.yml
  tasks:
    - name: Install server configuration
      become: true
      copy:
        src: nginx/nginx.conf
        dest: /etc/nginx/nginx.conf
      notify:
        - restart-nginx

    - name: Install application configurations
      when: item.value.published.host is defined
      become: true
      template:
        src: nginx/{{ item.key }}.nginx.conf.j2
        dest: /etc/nginx/conf.d/{{ item.key }}.conf
        owner: nginx
        group: "{{ ansible_user }}"
        mode: 0755
      loop: "{{ omni_compose_apps | dict2items }}"
      loop_control:
        label: "{{ item.key }} ({{ item.value.published.host | default('none') }})"
      notify:
        - restart-nginx

@@ -1,9 +0,0 @@
---
- hosts: all
  name: Ansible python bindings
  tags: always
  tasks:
    - import_tasks: tasks/centos/bindings.yml
      when: ansible_distribution == "CentOS"
    - import_tasks: tasks/fedora/bindings.yml
      when: ansible_distribution == "Fedora"
**playbooks/deploy-compose.yml** (new file, 79 lines)

@@ -0,0 +1,79 @@
---
- name: Prompt for input
  hosts: all
  tags:
    - always
  gather_facts: false
  vars_prompt:
    - name: application
      prompt: Enter name of application stack to deploy
      private: false
  vars_files:
    - vars/applications.yaml
  tasks:
    - name: Validate user input
      assert:
        that: application in omni_compose_apps.keys()

    - name: Set facts for usage later
      set_fact:
        _runtime_application: "{{ application }}"


- import_playbook: initialize.yml


- name: Build image
  hosts: virtualization
  vars_files:
    - vars/applications.yaml
  tasks:
    - import_tasks: tasks/docker/build.yml


- name: Configure datastore
  hosts: jupiter
  vars_files:
    - vars/applications.yaml
    - vars/secrets/applications.yaml
  tasks:
    - name: Create application datastore directory
      become: true
      file:
        path: "{{ omni_datastore_mount }}{{ omni_compose_apps[_runtime_application].datastore }}"
        state: directory
        owner: "{{ omni_compose_apps[_runtime_application].account.name }}"
        group: "{{ omni_compose_apps[_runtime_application].account.name }}"
        mode: 0750


- name: Configure docker stack
  hosts: jupiter
  vars_files:
    - vars/applications.yaml
    - vars/secrets/applications.yaml
  tasks:
    - name: Create compose configuration directory
      become: true
      file:
        path: "{{ omni_docker_configs }}/{{ _runtime_application }}"
        state: directory
        owner: "{{ ansible_user }}"
        group: docker
        mode: 0750

    - name: Install docker-compose file
      become: true
      template:
        src: docker-compose/{{ _runtime_application }}.yaml.j2
        dest: "{{ omni_docker_configs }}/{{ _runtime_application }}/docker-compose.yaml"
        owner: "{{ ansible_user }}"
        group: docker
        mode: 0640

    - name: Deploy the stack
      docker_stack:
        name: "{{ _runtime_application }}"
        state: present
        compose:
          - "{{ omni_docker_configs }}/{{ _runtime_application }}/docker-compose.yaml"
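For reference, one way to invoke the stack deployment play above; supplying the variable on the command line is an optional convenience (Ansible skips a `vars_prompt` when the variable is already defined), and the `gitea` stack name is only a hypothetical example:

```bash
# Interactive: answers the "Enter name of application stack to deploy" prompt
poetry run ansible-playbook -i en1.yml playbooks/deploy-compose.yml

# Non-interactive: supply the application name up front and skip the prompt
poetry run ansible-playbook -i en1.yml playbooks/deploy-compose.yml -e application=gitea
```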
@@ -1,67 +0,0 @@
---
- hosts: nimbus-1.net.enp.one
  name: Deploy documentation
  vars:
    # Local directory to use for cloning and building the documentation site
    DIR_BUILD: /tmp/docs
    # Remote directory to install the site at
    DIR_DEPLOY: /usr/share/nginx/doc.enp.one/html
  tasks:
    - name: Build the static site locally
      delegate_to: 127.0.0.1
      block:
        - name: Ensure the build directory does not exist
          file:
            path: "{{ DIR_BUILD }}"
            state: absent
        - name: Clone documentation repository
          git:
            repo: git@vcs.enp.one:omni/omni-docs.git
            dest: "{{ DIR_BUILD }}/"
        - name: Generate build env requirements file
          # Generate the requirements.txt style format, pipe through grep to remove
          # the index line (not sure why thats included at all tbh) and save the
          # result in "requirements.txt" to usage with pip
          shell: pipenv lock --requirements | grep --invert-match "\-i">requirements.txt
          args:
            chdir: "{{ DIR_BUILD }}/"
        - name: Create build env and install requirements
          pip:
            requirements: "{{ DIR_BUILD }}/requirements.txt"
            virtualenv: "{{ DIR_BUILD }}/venv"
            virtualenv_python: python3
            state: present
        - name: Build the static site using mkdocs
          shell: "{{ DIR_BUILD }}/venv/bin/mkdocs build"
          args:
            chdir: "{{ DIR_BUILD }}"

    - name: Upload static site to remote
      copy:
        src: "{{ DIR_BUILD }}/site/"
        dest: "/tmp/docs/"
    - name: Remove legacy site
      become: true
      file:
        path: "{{ DIR_DEPLOY }}"
        state: absent
    - name: Copy static site to deployment directory
      become: true
      copy:
        src: "/tmp/docs/"
        dest: "{{ DIR_DEPLOY }}"
        remote_src: true
        owner: root
        group: nginx
        mode: 0755
        setype: httpd_sys_content_t

    - name: Clean up local build directory
      delegate_to: 127.0.0.1
      file:
        path: "{{ DIR_BUILD }}"
        state: absent
    - name: Clean up remote temp directory
      file:
        path: /tmp/docs
        state: absent
@@ -1,32 +0,0 @@
---
- hosts: all
  name: Update ssh keys on all devices
  tasks:
    - import_tasks: tasks/users-preprocessing.yml

    - name: Install public keys
      tags: users_keys
      become: true
      block:
        - name: Ensure SSH directory exists
          file:
            state: directory
            path: /home/{{ item.name }}/.ssh
          loop: "{{ local_users | difference([None]) }}"

        - name: Put keys on remote
          when: item.keys != []
          authorized_key:
            user: "{{ item.name }}"
            key: "{{ item.sshkeys | join('\n') }}"
            state: present
            exclusive: yes
          loop: "{{ local_users | difference([None]) }}"

- hosts: all
  name: Disable SSH password authentication
  tags:
    - always
  tasks:
    - import_tasks: tasks/sshd/disable-password-auth.yml
      when: enable_ssh_password_auth|bool == false
**playbooks/files** (new symbolic link)

@@ -0,0 +1 @@
../resources
@@ -1,16 +0,0 @@
function up() { cd $(eval printf '../'%.0s {1..$1}); }
alias fuck='sudo $(history -p \!\!)'
alias doc='cd ~/Documents'
alias explorer='nautilus'
alias version='uname -orp && lsb_release -a | grep Description'
alias activate='source ./bin/activate'
alias ipconfig='ip address show'
alias cls='clear'
alias mklink='ln -s'
alias ls='ls -lshF --color --group-directories-first --time-style=long-iso'
alias gg='cd ~/Git'
parse_git_branch() {
  git branch 2> /dev/null | sed -e '/^[^*]/d' -e 's/* \(.*\)/ (\1)/'
}

export PS1="\[\e[0;97m\]\[\e[37m\]\u\[\e[1;94m\]@\[\e[94m\]\H\[\e[0;33m\]$(parse_git_branch) \[\e[37m\]\w\[\e[33m\] \[\e[0;97m\]$\[\e[0m\] "
**playbooks/initialize.yml** (new file, 117 lines)

@@ -0,0 +1,117 @@
---
- name: Bootstrap remote ansible environment
  hosts: all
  tags:
    - always
  vars:
    # Set this fact to allow the bootstrap play to run using the native system python
    # interpreter. A variable defined here is only in scope while this specific play
    # is being run; once this play is done this value is dropped and the default value
    # (which is actually set in the inventory file to the interpreter created by this
    # play) will be used.
    ansible_python_interpreter: /usr/bin/python3
  tasks:
    - name: Determine runtime settings
      set_fact:
        _runtime_clean: "{{ true if (clean | bool) else false }}"
        _runtime_update: "{{ true if (update | bool) else false }}"
        _runtime_update_state: "{{ 'latest' if (update | bool) else 'present' }}"

    - name: Clean bootstrap virtualenv
      when: _runtime_clean
      become: true
      file:
        path: "{{ omni_ansible_venv }}"
        state: absent

    - name: Create bootstrap virtualenv directory
      become: true
      file:
        path: "{{ omni_ansible_venv }}"
        state: directory
        owner: "{{ ansible_user }}"
        group: "{{ ansible_user }}"
        mode: 0755

    - name: Create bootstrap virtualenv
      command:
        cmd: "{{ ansible_python_interpreter }} -m venv {{ omni_ansible_venv }} --system-site-packages"
        creates: "{{ omni_ansible_venv }}/bin/python"

    - name: Generate remote requirements file locally
      delegate_to: 127.0.0.1
      command:
        cmd: poetry export --format requirements.txt
      changed_when: false
      register: _poetry_requirements

    - name: Copy remote requirements file
      blockinfile:
        path: "{{ omni_ansible_venv }}/req.txt"
        create: true
        block: "{{ _poetry_requirements.stdout_lines | join('\n') }}"
        mode: 0644

    - name: Install remote requirements
      pip:
        executable: "{{ omni_ansible_venv }}/bin/pip"
        requirements: "{{ omni_ansible_venv }}/req.txt"
        state: present

    - name: Install CentOS 8 python bindings
      when: ansible_distribution == "CentOS" and ansible_distribution_major_version == "8"
      become: true
      dnf:
        state: "{{ _runtime_update_state }}"
        name:
          - python3-libselinux
          - python3-policycoreutils
          - python3-firewall

    - name: Install CentOS 7 python bindings
      when: ansible_distribution == "CentOS" and ansible_distribution_major_version == "7"
      become: true
      yum:
        state: "{{ _runtime_update_state }}"
        name:
          - libselinux-python
          - policycoreutils-python
          - python-firewall

    - name: Install Fedora python bindings
      when: ansible_distribution == "Fedora"
      become: true
      dnf:
        state: "{{ _runtime_update_state }}"
        name:
          - libselinux-python
          - policycoreutils-python
          - python3-firewall


- name: Check meta environment
  hosts: all
  tags:
    - always
  tasks:
    - name: Check required operating system
      when: omni_os is defined
      assert:
        that:
          - omni_os.name == ansible_distribution | lower
          - omni_os.version_major == ansible_distribution_major_version
        fail_msg: >-
          Remote is running OS '{{ ansible_distribution }} {{ ansible_distribution_major_version }}',
          expected '{{ omni_os.name }} {{ omni_os.version_major }}'
        success_msg: >-
          Remote is running expected OS '{{ ansible_distribution }}
          {{ ansible_distribution_major_version }}'

    - name: Check required interpreter settings
      assert:
        that:
          - ansible_python_interpreter.startswith(omni_ansible_venv) is true
        fail_msg: >-
          Interpreter '{{ ansible_python_interpreter }}'
          is not in the expected venv '{{ omni_ansible_venv }}'
        success_msg: Interpreter '{{ ansible_python_interpreter }}' is in the expected venv"
**playbooks/provision-common.yml** (new file, 29 lines)

@@ -0,0 +1,29 @@
---
- import_playbook: initialize.yml


- name: Configure system settings
  hosts: all
  vars_files:
    - vars/packages.yml
  pre_tasks:
    - import_tasks: tasks/centos-8-kernelplus.yml
  tasks:
    - import_tasks: tasks/packages/clean.yml
      when: _runtime_clean is true

    - import_tasks: tasks/packages/repos.yml

    - import_tasks: tasks/packages/update.yml
      when: _runtime_update is true

    - import_tasks: tasks/packages/install.yml


- import_playbook: configure-network.yml


- import_playbook: configure-mgmt.yml


- import_playbook: configure-env.yml

**playbooks/provision-datastore.yml** (new file, 2 lines)

@@ -0,0 +1,2 @@
---
# TBW
**playbooks/provision-swarm.yml** (new file, 61 lines)

@@ -0,0 +1,61 @@
---
# TBW

# - import_playbook: provision-common.yml
#
#
# - name: Install and start Docker
#   hosts: virtualization
#   tasks:
#     - import_tasks: tasks/docker/install.yml
#
#     - name: Start and enable docker service
#       become: true
#       systemd:
#         name: docker
#         state: started
#         enabled: yes
#
#     - name: Allow swarm traffic through the firewall
#       become: true
#       firewalld:
#         zone: trusted
#         interface: "{{ item.key }}"
#         permanent: true
#         state: enabled
#
#
# - name: Configure swarm master
#   hosts: "{{ omni_docker_swarm_manager }}"
#   tasks:
#     - name: Initialize swarm
#       docker_swarm:
#         state: present
#         advertise_addr: "{{ omni_docker_swarm_iface }}"
#
#     - name: Set swarm master to DRAIN
#       docker_node:
#         hostname: "{{ ansible_host }}"
#         availability: drain
#
# - name: Configure swarm nodes
#   hosts:
#     - remus
#     - romulus
#   tags: docker-nodes
#   tasks:
#     - name: Fetch docker swarm information
#       delegate_to: jupiter
#       docker_swarm_info:
#       register: _swarm_info
#
#     - name: Join workers to swarm
#       docker_swarm:
#         state: join
#         remote_addrs: ["jupiter.svr.local"]
#         join_token: "{{ _swarm_info.swarm_facts.JoinTokens.Worker }}"
#         advertise_addr: "{{ omni_docker_swarm_iface }}"
#
# # docker plugin install --alias glusterfs trajano/glusterfs-volume-plugin:v2.0.3 --grant-all-permissions --disable
# # docker plugin set glusterfs SERVERS=jupiter.svr.local,remus.svr.local,romulus.svr.local
# # docker plugin enable glusterfs
@@ -1,26 +0,0 @@
---
- hosts: vms
  name: Replace NetworkManager with systemd-networkd
  tasks:
    - name: Install systemd-networkd
      when: enable_networkd == true
      block:
        - import_tasks: tasks/centos/networkd.yml
          when: ansible_distribution == "CentOS"
        - import_tasks: tasks/fedora/networkd.yml
          when: ansible_distribution == "Fedora"
        # - import_tasks: common/debian/networkd.yml
        #   when: ansible_distribution == "Debian" or ansible_distribution == "Ubuntu"

    - import_tasks: tasks/networkd/config.yml
    - import_tasks: tasks/networkd/services.yml


- hosts: vms
  name: Install ovirt agent
  tasks:
    - name: Install ovirt-agent
      become: true
      yum:
        name: ovirt-guest-agent
        state: latest
@@ -1,47 +1,16 @@
---
- import_playbook: dependencies.yml
# First: meta setup. Check everything is as we expect and that we have a remote
# venv with required dependencies
- import_playbook: initialize.yml

# Second: initial setup. Enforces the system to a "known good" state that we can
# work with
- import_playbook: provision-common.yml

- hosts: all
  name: Init
  tags: initialize
  tasks:
    - name: Set hostname
      become: true
      hostname:
        name: "{{ default_host if default_host is defined else inventory_hostname }}"
# Third: setup the datastore. Lots of downstream stuff won't work without the ability
# to mount data storage
- import_playbook: provision-datastore.yml

    - name: Install global bashrc
      become: true
      copy:
        src: bashrc.sh
        dest: /etc/profile.d/global-bashrc.sh
        mode: 0644

    - import_tasks: tasks/sshd/banner.yml


- hosts: all
  name: System packages
  tags: initialize
  tasks:
    - name: Load package variables
      include_vars:
        file: packages.yml
    - import_tasks: tasks/centos/repositories.yml
      when: ansible_distribution == "CentOS"
    - import_tasks: tasks/centos/packages.yml
      when: ansible_distribution == "CentOS"
    - import_tasks: tasks/fedora/packages.yml
      when: ansible_distribution == "Fedora"
    # - import_tasks: tasks/debian/packages.yml
    #   when: ansible_distribution == "Debian" or ansible_distribution == "Ubuntu"


#- import_playbook: provision-workstation.yml
#- import_playbook: provision-server.yml
#- import_playbook: provision-hypervisor.yml
- import_playbook: provision-virtual-machine.yml


- import_playbook: update.yml
# Finally: setup the docker swarm. Configures the workers, security, web proxy, and
# management system. Once done, applications are ready for deployment
- import_playbook: provison-swarm.yml
**playbooks/roles** (new symbolic link)

@@ -0,0 +1 @@
../roles

**playbooks/templates** (new symbolic link)

@@ -0,0 +1 @@
../resources
@@ -1,8 +0,0 @@

//////////// //// //// ///////////
//// ////// //// //// ////
//////// //// /// //// ///////////
//// //// ////// ////
//////////// //// //// {{ description | default('Omni Network System') }}
_______________________________{{ description | default('Omni Network System') | length * '\\' }}\
@@ -1,8 +0,0 @@
# ANSIBLE MANAGED FILE - DO NOT EDIT
[Match]
Name={{ item.key }}

[Network]
DHCP=Yes

# EOF
@@ -1,57 +0,0 @@
---
# - hosts: vm-host-plex.net.enp.one
#   #gather_facts: false
#   tasks:
#     - name: Query plex API (shhh) to load latest releases
#       get_url:
#         url: https://plex.tv/api/downloads/5.json
#         dest: "{{ plex_releases_file | default('/tmp/plexreleases.json') }}"

- hosts: vm-host-plex.net.enp.one
  name: Update Plex Media Server to latest version
  vars:
    plex_releases: "{{ lookup('url', 'https://plex.tv/api/downloads/5.json') | from_json }}"
  tasks:
    - name: Identifiy the proper release file
      when: (ansible_os_family | lower == item["distro"]) and (ansible_distribution | lower in item["label"] | lower) and (ansible_userspace_bits in item["label"])
      set_fact:
        plex_release_url: "{{ item.url }}"
        plex_release_checksum: "{{ item.checksum }}"
      loop: "{{ plex_releases['computer']['Linux']['releases'] }}"

    - name: Download package
      get_url:
        url: "{{ plex_release_url }}"
        checksum: sha1:{{ plex_release_checksum }}
        dest: /tmp/plexmediaserver-{{ plex_release_checksum }}.{{ plex_release_url.split(".")[-1] }}

    - name: Stop the PMS service
      become: true
      systemd:
        name: "{{ plex_service | default('plexmediaserver') }}"
        state: stopped

    - name: Install update package
      become: true
      block:
        - name: Install update package using DNF
          when: ansible_distribution == "Fedora"
          dnf:
            name: /tmp/plexmediaserver-{{ plex_release_checksum }}.rpm
            state: latest
        - name: Install update package using YUM
          when: ansible_distribution == "CentOS"
          yum:
            name: /tmp/plexmediaserver-{{ plex_release_checksum }}.rpm
            state: latest
        - name: Install update package using APT
          when: ansible_distribution == "Ubuntu" or ansible_distribution == "Debian"
          apt:
            name: /tmp/plexmediaserver-{{ plex_release_checksum }}.deb
            state: latest

    - name: Start the PMS service
      become: true
      systemd:
        name: "{{ plex_service | default('plexmediaserver') }}"
        state: started
@@ -1,24 +0,0 @@
---
- hosts: all
  name: Upgrade packages
  tasks:
    - name: Upgrade YUM packages
      when: ansible_distribution == "CentOS"
      become: true
      yum:
        state: latest
        name: "*"
        exclude: kernel*{{ ',' + exclude_upgrade | default('') }}

    - name: Upgrade DNF packages
      when: ansible_distribution == "Fedora"
      become: true
      dnf:
        state: latest
        name: "*"
        exclude: kernel*{{ ',' + exclude_upgrade | default('') }}

    # - name: Upgrade APT packages
    #   when: ansible_distribution == "Debian" or ansible_distribution == "Ubuntu"
    #   become: true
    #   apt:
@@ -1,132 +0,0 @@
|
||||
---
|
||||
- import_playbook: dependencies.yml
|
||||
|
||||
- hosts: all:!network
|
||||
name: Update local user accounts and access controls
|
||||
tasks:
|
||||
- import_tasks: tasks/users-preprocessing.yml
|
||||
|
||||
- name: Create local user accounts
|
||||
tags: users_create
|
||||
become: true
|
||||
block:
|
||||
- name: Create groups
|
||||
group:
|
||||
name: "{{ item }}"
|
||||
state: present
|
||||
loop: "{{ targets + ['omni'] }}"
|
||||
|
||||
- name: Create users
|
||||
user:
|
||||
name: "{{ item.name }}"
|
||||
comment: "{{ item.fullname | default('') }}"
|
||||
shell: /bin/bash
|
||||
groups: "{{ item.targets | intersect(targets) + ['omni'] }}"
|
||||
system: "{{ item.svc | default(False) }}"
|
||||
state: present
|
||||
generate_ssh_key: "{{ generate_keys | bool }}"
|
||||
ssh_key_comment: "{{ item.name }}@{{ inventory_hostname }}"
|
||||
ssh_key_bits: 4096
|
||||
ssh_key_type: ed25519
|
||||
password: "{{ item.password }}"
|
||||
loop: "{{ local_users }}"
|
||||
|
||||
- name: Delete removed user accounts
|
||||
become: true
|
||||
user:
|
||||
name: "{{ item }}"
|
||||
state: absent
|
||||
loop: "{{ local_removed_users | difference(protected_users) }}"
|
||||
|
||||
- name: Grant sudo permissions to admin user accounts
|
||||
become: true
|
||||
user:
|
||||
name: "{{ item.name }}"
|
||||
groups: "{{ 'wheel' if ansible_os_family | lower == 'redhat' else 'sudo' }}"
|
||||
state: present
|
||||
loop: "{{ local_admin_users }}"
|
||||
|
||||
- name: Disable sudo password for ansible
|
||||
become: true
|
||||
lineinfile:
|
||||
create: true
|
||||
path: /etc/sudoers.d/30-ansible
|
||||
line: "ansible ALL=(ALL) NOPASSWD:ALL"
|
||||
mode: 0644
|
||||
|
||||
- name: Disable sudo password for admin users
|
||||
become: true
|
||||
lineinfile:
|
||||
create: true
|
||||
path: /etc/sudoers.d/40-admin
|
||||
line: "{{ item.name }} ALL=(ALL) NOPASSWD:ALL"
|
||||
mode: 0644
|
||||
state: "{{ 'absent' if disable_sudo_password | bool == false else 'present' }}"
|
||||
loop: "{{ local_admin_users }}"
|
||||
|
||||
- name: Configure GNOME
|
||||
tags: users_gnome
|
||||
when: ansible_distribution == "Fedora" and disable_gnome_user_list | bool == true
|
||||
become: true
|
||||
block:
|
||||
- name: Configure GDM profile
|
||||
blockinfile:
|
||||
create: true
|
||||
path: /etc/dconf/profile/gdm
|
||||
block: |
|
||||
user-db:user
|
||||
system-db:gdm
|
||||
file-db:/usr/share/gdm/greeter-dconf-defaults
|
||||
- name: Configure GDM keyfile
|
||||
blockinfile:
|
||||
create: true
|
||||
path: /etc/dconf/db/gdm.d/00-login-screen
|
||||
block: |
|
||||
[org/gnome/login-screen]
|
||||
# Do not show the user list
|
||||
disable-user-list=true
|
||||
- name: Delete existing user database
|
||||
file:
|
||||
path: /var/lib/gdm/.config/dconf/user
|
||||
state: absent
|
||||
- name: Restart dconf database
|
||||
shell: dconf update
|
||||
|
||||
- name: Ensure proper ownership of user home directories
|
||||
become: true
|
||||
file:
|
||||
group: "{{ item.name }}"
|
||||
owner: "{{ item.name }}"
|
||||
path: /home/{{ item.name }}
|
||||
recurse: true
|
||||
state: directory
|
||||
loop: "{{ local_users }}"
|
||||
|
||||
# - hosts: router.net.enp.one
|
||||
# name: Configure users on router
|
||||
# connection: network_cli
|
||||
# vars:
|
||||
# ansible_network_os: edgeos
|
||||
# tasks:
|
||||
# - import_tasks: tasks/users-preprocessing.yml
|
||||
#
|
||||
# - name: Create users
|
||||
# edgeos_config:
|
||||
# lines:
|
||||
# - set system login user {{ item.name }} authentication encrypted-password "{{ item.password }}"
|
||||
# - set system login user {{ item.name }} full-name "{{ item.fullname if item.fullname is defined else "" }}"
|
||||
# - set system login user {{ item.name }} level {{ 'operator' if item.name != 'ansible' else 'admin' }}
|
||||
# loop: "{{ local_users | difference([None]) }}"
|
||||
#
|
||||
# - name: Grant administrative access to admin users
|
||||
# edgeos_config:
|
||||
# lines:
|
||||
# - set system login user {{ item.name }} level admin
|
||||
# loop: "{{ local_admin_users | difference([None]) }}"
|
||||
#
|
||||
# - name: Assemble key files for loadkey usage
|
||||
# edgeos_command:
|
||||
# commands: sudo tee /tmp/{{ item.name }}.keys<<<"{{ item.sshkeys | join('\n') }}"
|
||||
# loop: "{{ local_admin_users | difference([None]) }}"
|
||||
#
|
||||
# - import_playbook: deploy-sshkeys.yml
|
||||
@@ -1,34 +0,0 @@
|
||||
---
|
||||
- hosts: router.net.enp.one
|
||||
name: Configure users on router
|
||||
connection: network_cli
|
||||
gather_facts: false
|
||||
tasks:
|
||||
- import_tasks: tasks/users-preprocessing.yml
|
||||
|
||||
- name: Create users
|
||||
edgeos_config:
|
||||
lines:
|
||||
- set system login user {{ item.name }} authentication encrypted-password "{{ item.password }}"
|
||||
- set system login user {{ item.name }} full-name "{{ item.fullname if item.fullname is defined else "" }}"
|
||||
- set system login user {{ item.name }} level {{ 'operator' if item.name != 'ansible' else 'admin' }}
|
||||
loop: "{{ local_users | difference([None]) }}"
|
||||
|
||||
- name: Grant administrative access to admin users
|
||||
edgeos_config:
|
||||
lines:
|
||||
- set system login user {{ item.name }} level admin
|
||||
with_items:
|
||||
- "{{ local_admin_users | difference([None]) }}"
|
||||
|
||||
- name: Assemble loadkey files
|
||||
edgeos_command:
|
||||
commands:
|
||||
- sudo tee /tmp/{{ item.name }}.keys<<<"{{ item.sshkeys | join('\n') }}"
|
||||
loop: "{{ local_admin_users | difference([None]) }}"
|
||||
|
||||
- name: Load keys
|
||||
edgeos_config:
|
||||
lines:
|
||||
- loadkey {{ item.name }} /tmp/{{ item.name }}.keys
|
||||
loop: "{{ local_admin_users | difference([None]) }}"
|
||||
@@ -1,5 +0,0 @@
|
||||
---
|
||||
- import_playbook: dependencies.yml
|
||||
|
||||
- import_playbook: update-system.yml
|
||||
- import_playbook: update-users-local.yml
|
||||
1520
poetry.lock
generated
Normal file
File diff suppressed because it is too large
24
pyproject.toml
Normal file
@@ -0,0 +1,24 @@
|
||||
[tool.poetry]
|
||||
name = "omni-ansible"
|
||||
version = "0.0.0"
|
||||
description = "Network deployment procedures and configuration state"
|
||||
authors = ["Ethan Paul <me@enp.one>"]
|
||||
license = "MIT"
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = "^3.7"
|
||||
ansible = "^2.9.4"
|
||||
docker = "^4.2.0"
|
||||
docker-compose = "^1.25.4"
|
||||
paramiko = "^2.7.1"
|
||||
jsondiff = "^1.2.0"
|
||||
|
||||
[tool.poetry.dev-dependencies]
|
||||
ansible-lint = "^4.2.0"
|
||||
ansible-toolbox = "^0.3"
|
||||
pre-commit = "^2.9.2"
|
||||
pre-commit-hooks = "^3.3.0"
|
||||
safety = "^1.9.0"
|
||||
tox = "^3.20.1"
|
||||
tox-poetry-installer = "^0.5.2"
|
||||
yamllint = "^1.20.0"
|
||||
57
resources/atom-config.cson
Normal file
@@ -0,0 +1,57 @@
|
||||
"*":
|
||||
"autocomplete-python":
|
||||
useKite: false
|
||||
core:
|
||||
disabledPackages: [
|
||||
"about"
|
||||
"background-tips"
|
||||
"github"
|
||||
"image-view"
|
||||
"metrics"
|
||||
"open-on-github"
|
||||
]
|
||||
telemetryConsent: "no"
|
||||
themes: [
|
||||
"one-dark-ui"
|
||||
"base16-tomorrow-dark-theme"
|
||||
]
|
||||
editor:
|
||||
fontSize: 16
|
||||
invisibles: {}
|
||||
preferredLineLength: 100
|
||||
"exception-reporting":
|
||||
userId: "21f90c70-b680-4a55-a906-c8d67e98bf28"
|
||||
"ide-python":
|
||||
pylsPlugins:
|
||||
flake8:
|
||||
ignore: [
|
||||
"E121"
|
||||
"E123"
|
||||
"E126"
|
||||
"E226"
|
||||
"E24"
|
||||
"E704"
|
||||
"W503"
|
||||
"W504"
|
||||
"E501"
|
||||
]
|
||||
pycodestyle:
|
||||
ignore: [
|
||||
"E121"
|
||||
"E123"
|
||||
"E126"
|
||||
"E226"
|
||||
"E24"
|
||||
"E704"
|
||||
"W503"
|
||||
"E501"
|
||||
]
|
||||
maxLineLength: 100
|
||||
pyflakes: {}
|
||||
pylint:
|
||||
enabled: true
|
||||
rope_completion: {}
|
||||
python: "python3.7"
|
||||
"tree-view": {}
|
||||
welcome:
|
||||
showOnStartup: false
|
||||
4
resources/bash/aliases-workstation.sh
Normal file
@@ -0,0 +1,4 @@
|
||||
alias doc='cd ~/Documents'
|
||||
alias dn='cd ~/Downloads'
|
||||
alias gg='cd ~/Git'
|
||||
alias explorer='nautilus'
|
||||
12
resources/bash/aliases.sh
Normal file
@@ -0,0 +1,12 @@
|
||||
alias bk='cd -'
|
||||
alias fuck='sudo $(history -p \!\!)'
|
||||
alias ls='ls -lshF --color --group-directories-first --time-style=long-iso'
|
||||
alias version='uname -orp && lsb_release -a | grep Description'
|
||||
alias activate='source ./bin/activate'
|
||||
alias cls='clear'
|
||||
alias ls='/usr/bin/ls -lshF --color --group-directories-first --time-style=long-iso'
|
||||
alias gmtime='/usr/bin/date -u --iso-8601=seconds'
|
||||
alias date='/usr/bin/date --iso-8601=seconds'
|
||||
alias whatismyip='curl https://icanhazip.com/'
|
||||
alias uuid="python3 -c 'import uuid; print(uuid.uuid4());'"
|
||||
alias epoch="python3 -c 'import time; print(time.time());'"
|
||||
7
resources/bash/global.sh
Normal file
@@ -0,0 +1,7 @@
|
||||
function _parse_git_branch() {
|
||||
git branch 2> /dev/null | sed -e '/^[^*]/d' -e 's/* \(.*\)/ (\1)/'
|
||||
}
|
||||
|
||||
export PS1="\[\e[0;97m\]\[\e[37m\e[1m\]\u\[\e[1;94m\]@\[\e[94m\]\H\[\e[0;33m\]\$(_parse_git_branch) \[\e[37m\]\w\[\e[33m\] \[\e[0;97m\]$\[\e[0m\] "
|
||||
export rc=/home/$USERNAME/.bashrc
|
||||
export VIRTUALENV_DIR=/home/$USERNAME/.venvs
|
||||
18
resources/bash/helpers.sh
Normal file
@@ -0,0 +1,18 @@
|
||||
random() {
|
||||
if [[ $# -eq 0 ]]; then
|
||||
num=32
|
||||
else
|
||||
num=$1
|
||||
fi
|
||||
cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w $num | head -n 1
|
||||
}
|
||||
|
||||
function up() { cd $(eval printf '../'%.0s {1..$1}); }
|
||||
|
||||
function pipin() { pip freeze | grep $1; }
|
||||
|
||||
function passhash() {
|
||||
read -sp 'Password: ' tmppass;
|
||||
echo $tmppass | python3 -c 'import crypt; print(crypt.crypt(input(), crypt.mksalt(crypt.METHOD_SHA512)));';
|
||||
unset tmppass;
|
||||
}
|
||||
76
resources/bash/pyenv.sh
Normal file
@@ -0,0 +1,76 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
function pyenv () {
|
||||
usage="Custom Python virtualenv manager
|
||||
pyenv [list, delete, load, new] [VENV]
|
||||
Commands:
|
||||
list List existing virtualenvs (alias: 'ls')
|
||||
load VENV Activate the virtualenv named VENV (alias: 'source')
|
||||
new VENV [VERSION] Create and load a new virtualenv named VENV. Optionally VERSION
|
||||
can be a python version to use for creating the venv. Note that
|
||||
only python3 versions are supported.
|
||||
delete VENV Delete the virtualenv named VENV (alias: 'rm')";
|
||||
|
||||
if [ $# -eq 0 ]; then
|
||||
echo "Error: no command specified" >&2;
|
||||
echo "$usage";
|
||||
return 1;
|
||||
fi;
|
||||
|
||||
case $1 in
|
||||
"-h"| "--help")
|
||||
echo "$usage";
|
||||
return 0;;
|
||||
"ls"| "list")
|
||||
lsvenv "$VIRTUALENV_DIR";;
|
||||
"rm"| "delete")
|
||||
if [ $# -ne 2 ]; then
|
||||
echo "Error: no virtualenv specified" >&2;
|
||||
return 1;
|
||||
fi;
|
||||
rm --recursive --force "${VIRTUALENV_DIR:?}/$2";;
|
||||
"source" | "load")
|
||||
if [ $# -ne 2 ]; then
|
||||
echo "Error: no virtualenv specified" >&2;
|
||||
return 1;
|
||||
fi;
|
||||
# shellcheck source=/dev/null
|
||||
source "$VIRTUALENV_DIR/$2/bin/activate";;
|
||||
"new")
|
||||
if [ $# -lt 2 ]; then
|
||||
echo "Error: no virtualenv specified" >&2;
|
||||
return 1;
|
||||
fi;
|
||||
if [ $# -eq 3 ]; then
|
||||
version="$3";
|
||||
else
|
||||
version="3";
|
||||
fi
|
||||
if ! command -v "python$version" &>/dev/null; then
|
||||
echo "Error: no interpreter found for python version '$version'" >&2;
|
||||
return 2;
|
||||
fi
|
||||
|
||||
if python$version -m venv "$VIRTUALENV_DIR/$2"; then
|
||||
echo "New virtualenv '$2' created using $(command -v python$version)" >&2;
|
||||
# shellcheck source=/dev/null
|
||||
source "$VIRTUALENV_DIR/$2/bin/activate"
|
||||
else
|
||||
return $?;
|
||||
fi;;
|
||||
*)
|
||||
echo "Error: unknown command '$1'" >&2;
|
||||
echo "$usage";
|
||||
return 1;;
|
||||
esac
|
||||
}
|
||||
|
||||
function lsvenv () {
|
||||
venvs=()
|
||||
for item in $(/usr/bin/ls -d "$1"/*/); do
|
||||
if stat "${item}/bin/activate" &>/dev/null; then
|
||||
venvs+=("$(basename "$item")");
|
||||
fi
|
||||
done
|
||||
echo "${venvs[*]}"
|
||||
}
|
||||
20
resources/bash/setup-atom.sh
Normal file
@@ -0,0 +1,20 @@
|
||||
curl -L -o atom.rpm https://github.com/atom/atom/releases/download/v1.53.0/atom.x86_64.rpm
|
||||
dnf install atom.rpm
|
||||
|
||||
python3.7 -m pip install \
|
||||
python-language-server[all]==0.21.5 \
|
||||
parso==0.5.2 \
|
||||
jedi==0.15.2
|
||||
|
||||
apm install \
|
||||
atom-ide-ui@0.13.0 \
|
||||
atom-jinja2@0.6.0 \
|
||||
atom-typescript@14.1.2 \
|
||||
autocomplete-python@1.16.0 \
|
||||
ide-python@1.6.2 \
|
||||
ide-typescript@0.9.1 \
|
||||
language-docker \
|
||||
language-ini \
|
||||
language-restructuredtext \
|
||||
language-rpm-spec \
|
||||
minimap
|
||||
257
resources/docker-compose/bitwarden.yaml.j2
Normal file
@@ -0,0 +1,257 @@
|
||||
---
|
||||
version: "{{ omni_compose_version | string }}"
|
||||
|
||||
|
||||
x-global-env: &globalenv
|
||||
LOCAL_UID: "{{ omni_compose_apps.bitwarden.account.uid | string }}"
|
||||
LOCAL_GID: "{{ omni_compose_apps.bitwarden.account.uid | string }}"
|
||||
ASPNETCORE_ENVIRONMENT: Production
|
||||
globalSettings__selfHosted: "true"
|
||||
globalSettings__baseServiceUri__vault: https://{{ omni_compose_apps.bitwarden.published.host }}
|
||||
globalSettings__baseServiceUri__api: https://{{ omni_compose_apps.bitwarden.published.host }}/api
|
||||
globalSettings__baseServiceUri__identity: https://{{ omni_compose_apps.bitwarden.published.host }}/identity
|
||||
globalSettings__baseServiceUri__admin: https://{{ omni_compose_apps.bitwarden.published.host }}/admin
|
||||
globalSettings__baseServiceUri__notifications: https://{{ omni_compose_apps.bitwarden.published.host }}/notifications
|
||||
globalSettings__baseServiceUri__internalNotifications: http://bitwarden_notifications:5000
|
||||
globalSettings__baseServiceUri__internalAdmin: http://bitwarden_admin:5000
|
||||
globalSettings__baseServiceUri__internalIdentity: http://bitwarden_identity:5000
|
||||
globalSettings__baseServiceUri__internalApi: http://bitwarden_api:5000
|
||||
globalSettings__baseServiceUri__internalVault: http://bitwarden_web:5000
|
||||
globalSettings__pushRelayBaseUri: https://push.bitwarden.com
|
||||
globalSettings__installation__identityUri: https://identity.bitwarden.com
|
||||
globalSettings__sqlServer__connectionString: "Data Source=tcp:mssql,1433;Initial Catalog=vault;Persist Security Info=False;User ID=sa;Password=e934c0bb-3b5a-4e6b-b525-cd6d83004e1a;MultipleActiveResultSets=False;Connect Timeout=30;Encrypt=True;TrustServerCertificate=True"
|
||||
globalSettings__identityServer__certificatePassword: {{ omni_compose_app_secrets.bitwarden.identity_server_certificate_password }}
|
||||
globalSettings__attachment__baseDirectory: /etc/bitwarden/core/attachments
|
||||
globalSettings__attachment__baseUrl: https://{{ omni_compose_apps.bitwarden.published.host }}/attachments
|
||||
globalSettings__dataProtection__directory: /etc/bitwarden/core/aspnet-dataprotection
|
||||
globalSettings__logDirectory: /etc/bitwarden/logs
|
||||
globalSettings__licenseDirectory: /etc/bitwarden/core/licenses
|
||||
globalSettings__internalIdentityKey: {{ omni_compose_app_secrets.bitwarden.internal_identity_key }}
|
||||
globalSettings__duo__aKey: {{ omni_compose_app_secrets.bitwarden.duo_akey }}
|
||||
globalSettings__installation__id: {{ omni_compose_app_secrets.bitwarden.installation_id }}
|
||||
globalSettings__installation__key: {{ omni_compose_app_secrets.bitwarden.installation_key }}
|
||||
globalSettings__yubico__clientId: REPLACE
|
||||
globalSettings__yubico__key: REPLACE
|
||||
globalSettings__mail__replyToEmail: noreply@enp.one
|
||||
globalSettings__mail__smtp__host: REPLACE
|
||||
globalSettings__mail__smtp__port: "587"
|
||||
globalSettings__mail__smtp__ssl: "false"
|
||||
globalSettings__mail__smtp__username: REPLACE
|
||||
globalSettings__mail__smtp__password: REPLACE
|
||||
globalSettings__disableUserRegistration: "false"
|
||||
globalSettings__hibpApiKey: REPLACE
|
||||
adminSettings__admins: ""
|
||||
|
||||
|
||||
volumes:
|
||||
bitwarden-db-data:
|
||||
name: datastore{{ omni_compose_apps.bitwarden.datastore }}/mssql/data
|
||||
driver: glusterfs
|
||||
bitwarden-db-backup:
|
||||
name: datastore{{ omni_compose_apps.bitwarden.datastore }}/mssql/backup
|
||||
bitwarden-nginx-data:
|
||||
name: datastore{{ omni_compose_apps.bitwarden.datastore }}/nginx
|
||||
driver: glusterfs
|
||||
bitwarden-web:
|
||||
name: datastore{{ omni_compose_apps.bitwarden.datastore }}/web
|
||||
driver: glusterfs
|
||||
bitwarden-ssl:
|
||||
name: datastore{{ omni_compose_apps.bitwarden.datastore }}/ssl
|
||||
driver: glusterfs
|
||||
bitwarden-ca-certs:
|
||||
name: datastore{{ omni_compose_apps.bitwarden.datastore }}/ca-certificates
|
||||
driver: glusterfs
|
||||
bitwarden-core:
|
||||
name: datastore{{ omni_compose_apps.bitwarden.datastore }}/core
|
||||
driver: glusterfs
|
||||
bitwarden-identity:
|
||||
name: datastore{{ omni_compose_apps.bitwarden.datastore }}/identity
|
||||
driver: glusterfs
|
||||
bitwarden-logs-api:
|
||||
name: datastore{{ omni_compose_apps.bitwarden.datastore }}/logs/api
|
||||
driver: glusterfs
|
||||
bitwarden-logs-db:
|
||||
name: datastore{{ omni_compose_apps.bitwarden.datastore }}/logs/mssql
|
||||
driver: glusterfs
|
||||
bitwarden-logs-identity:
|
||||
name: datastore{{ omni_compose_apps.bitwarden.datastore }}/logs/identity
|
||||
driver: glusterfs
|
||||
bitwarden-logs-nginx:
|
||||
name: datastore{{ omni_compose_apps.bitwarden.datastore }}/logs/nginx
|
||||
driver: glusterfs
|
||||
bitwarden-logs-admin:
|
||||
name: datastore{{ omni_compose_apps.bitwarden.datastore }}/logs/admin
|
||||
driver: glusterfs
|
||||
bitwarden-logs-icons:
|
||||
name: datastore{{ omni_compose_apps.bitwarden.datastore }}/logs/icons
|
||||
driver: glusterfs
|
||||
bitwarden-logs-notifications:
|
||||
name: datastore{{ omni_compose_apps.bitwarden.datastore }}/logs/notifications
|
||||
driver: glusterfs
|
||||
bitwarden-logs-events:
|
||||
name: datastore{{ omni_compose_apps.bitwarden.datastore }}/logs/events
|
||||
driver: glusterfs
|
||||
|
||||
|
||||
networks:
|
||||
bitwarden_internal:
|
||||
internal: true
|
||||
name: bitwarden_internal
|
||||
driver: overlay
|
||||
ipam:
|
||||
driver: default
|
||||
config:
|
||||
- subnet: {{ omni_compose_apps.bitwarden.networks.internal }}
|
||||
bitwarden_external:
|
||||
internal: false
|
||||
name: bitwarden_external
|
||||
driver: overlay
|
||||
ipam:
|
||||
driver: default
|
||||
config:
|
||||
- subnet: {{ omni_compose_apps.bitwarden.networks.external }}
|
||||
|
||||
|
||||
services:
|
||||
mssql:
|
||||
image: bitwarden/mssql:{{ omni_compose_apps.bitwarden.versions.mssql | default(omni_compose_apps.bitwarden.versions.default) }}
|
||||
stop_grace_period: 60s
|
||||
networks:
|
||||
- bitwarden_internal
|
||||
volumes:
|
||||
- bitwarden-db-data:/var/opt/mssql/data
|
||||
- bitwarden-db-backup:/etc/bitwarden/mssql/backups
|
||||
- bitwarden-logs-db:/var/opt/mssql/log
|
||||
environment:
|
||||
LOCAL_UID: "{{ omni_compose_apps.bitwarden.account.uid | string }}"
|
||||
LOCAL_GID: "{{ omni_compose_apps.bitwarden.account.uid | string }}"
|
||||
ACCEPT_EULA: "Y"
|
||||
MSSQL_PID: Express
|
||||
SA_PASSWORD: {{ omni_compose_app_secrets.bitwarden.mssql_sa_password }}
|
||||
deploy:
|
||||
replicas: 1
|
||||
|
||||
web:
|
||||
image: bitwarden/web:{{ omni_compose_apps.bitwarden.versions.web | default(omni_compose_apps.bitwarden.versions.default) }}
|
||||
networks:
|
||||
- bitwarden_internal
|
||||
volumes:
|
||||
- bitwarden-web:/etc/bitwarden/web
|
||||
environment: *globalenv
|
||||
deploy:
|
||||
replicas: 1
|
||||
|
||||
attachments:
|
||||
image: bitwarden/attachments:{{ omni_compose_apps.bitwarden.versions.attachments | default(omni_compose_apps.bitwarden.versions.default) }}
|
||||
networks:
|
||||
- bitwarden_internal
|
||||
volumes:
|
||||
- bitwarden-core:/etc/bitwarden/core
|
||||
environment: *globalenv
|
||||
deploy:
|
||||
replicas: 1
|
||||
|
||||
api:
|
||||
image: bitwarden/api:{{ omni_compose_apps.bitwarden.versions.api | default(omni_compose_apps.bitwarden.versions.default) }}
|
||||
volumes:
|
||||
- bitwarden-core:/etc/bitwarden/core
|
||||
- bitwarden-ca-certs:/etc/bitwarden/ca-certificates
|
||||
- bitwarden-logs-api:/etc/bitwarden/logs
|
||||
environment: *globalenv
|
||||
networks:
|
||||
- bitwarden_external
|
||||
- bitwarden_internal
|
||||
deploy:
|
||||
replicas: 1
|
||||
|
||||
identity:
|
||||
image: bitwarden/identity:{{ omni_compose_apps.bitwarden.versions.identity | default(omni_compose_apps.bitwarden.versions.default) }}
|
||||
volumes:
|
||||
- bitwarden-identity:/etc/bitwarden/identity
|
||||
- bitwarden-core:/etc/bitwarden/core
|
||||
- bitwarden-ca-certs:/etc/bitwarden/ca-certificates
|
||||
- bitwarden-logs-identity:/etc/bitwarden/logs
|
||||
environment: *globalenv
|
||||
networks:
|
||||
- bitwarden_external
|
||||
- bitwarden_internal
|
||||
deploy:
|
||||
replicas: 1
|
||||
|
||||
admin:
|
||||
image: bitwarden/admin:{{ omni_compose_apps.bitwarden.versions.admin | default(omni_compose_apps.bitwarden.versions.default) }}
|
||||
depends_on:
|
||||
- mssql
|
||||
volumes:
|
||||
- bitwarden-core:/etc/bitwarden/core
|
||||
- bitwarden-ca-certs:/etc/bitwarden/ca-certificates
|
||||
- bitwarden-logs-admin:/etc/bitwarden/logs
|
||||
environment: *globalenv
|
||||
networks:
|
||||
- bitwarden_external
|
||||
- bitwarden_internal
|
||||
deploy:
|
||||
replicas: 1
|
||||
|
||||
icons:
|
||||
image: bitwarden/icons:{{ omni_compose_apps.bitwarden.versions.icons | default(omni_compose_apps.bitwarden.versions.default) }}
|
||||
volumes:
|
||||
- bitwarden-ca-certs:/etc/bitwarden/ca-certificates
|
||||
- bitwarden-logs-icons:/etc/bitwarden/logs
|
||||
environment: *globalenv
|
||||
networks:
|
||||
- bitwarden_external
|
||||
- bitwarden_internal
|
||||
deploy:
|
||||
replicas: 1
|
||||
|
||||
notifications:
|
||||
image: bitwarden/notifications:{{ omni_compose_apps.bitwarden.versions.notifications | default(omni_compose_apps.bitwarden.versions.default) }}
|
||||
volumes:
|
||||
- bitwarden-ca-certs:/etc/bitwarden/ca-certificates
|
||||
- bitwarden-logs-notifications:/etc/bitwarden/logs
|
||||
environment: *globalenv
|
||||
networks:
|
||||
- bitwarden_external
|
||||
- bitwarden_internal
|
||||
deploy:
|
||||
replicas: 1
|
||||
|
||||
events:
|
||||
image: bitwarden/events:{{ omni_compose_apps.bitwarden.versions.events | default(omni_compose_apps.bitwarden.versions.default) }}
|
||||
volumes:
|
||||
- bitwarden-ca-certs:/etc/bitwarden/ca-certificates
|
||||
- bitwarden-logs-events:/etc/bitwarden/logs
|
||||
environment: *globalenv
|
||||
networks:
|
||||
- bitwarden_external
|
||||
- bitwarden_internal
|
||||
deploy:
|
||||
replicas: 1
|
||||
|
||||
nginx:
|
||||
image: bitwarden/nginx:{{ omni_compose_apps.bitwarden.versions.nginx | default(omni_compose_apps.bitwarden.versions.default) }}
|
||||
depends_on:
|
||||
- web
|
||||
- admin
|
||||
- api
|
||||
- identity
|
||||
ports:
|
||||
- published: {{ omni_compose_apps.bitwarden.published.ports.8080 }}
|
||||
target: 8080
|
||||
protocol: tcp
|
||||
mode: ingress
|
||||
- published: {{ omni_compose_apps.bitwarden.published.ports.8443 }}
|
||||
target: 8443
|
||||
protocol: tcp
|
||||
mode: ingress
|
||||
volumes:
|
||||
- bitwarden-nginx-data:/etc/bitwarden/nginx
|
||||
- bitwarden-ssl:/etc/ssl
|
||||
- bitwarden-logs-nginx:/var/log/nginx
|
||||
environment: *globalenv
|
||||
networks:
|
||||
- bitwarden_external
|
||||
- bitwarden_internal
|
||||
deploy:
|
||||
replicas: 1
|
||||
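All of the compose templates under resources/docker-compose/ render against a shared omni_compose_apps variable whose real values live in the secrets submodule. Based only on the keys referenced above, a minimal entry is shaped roughly like the following sketch (every value here is a placeholder, not the real inventory data):

# Hypothetical omni_compose_apps entry; key names mirror the template references above
omni_compose_apps:
  bitwarden:
    datastore: /apps/bitwarden
    account:
      uid: 1500
    networks:
      internal: 10.42.200.0/24
      external: 10.42.201.0/24
    published:
      host: vault.example.com
      ports:
        8080: 8080
        8443: 8443
    versions:
      default: 1.38.0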
51
resources/docker-compose/gitea.yaml.j2
Normal file
@@ -0,0 +1,51 @@
|
||||
---
|
||||
version: "{{ omni_compose_version | string }}"
|
||||
|
||||
|
||||
networks:
|
||||
gitea:
|
||||
name: gitea
|
||||
driver: overlay
|
||||
ipam:
|
||||
driver: default
|
||||
config:
|
||||
- subnet: {{ omni_compose_apps.gitea.networks.main }}
|
||||
|
||||
|
||||
volumes:
|
||||
gitea-data:
|
||||
name: datastore{{ omni_compose_apps.gitea.datastore }}
|
||||
driver: glusterfs
|
||||
|
||||
|
||||
services:
|
||||
server:
|
||||
image: gitea/gitea:{{ omni_compose_apps.gitea.versions.gitea | default(omni_compose_apps.gitea.versions.default) }}
|
||||
hostname: gitea
|
||||
networks:
|
||||
- gitea
|
||||
ports:
|
||||
- published: {{ omni_compose_apps.gitea.published.ports.3000 }}
|
||||
target: 3000
|
||||
protocol: tcp
|
||||
mode: ingress
|
||||
- published: {{ omni_compose_apps.gitea.published.ports.22 }}
|
||||
target: 22
|
||||
protocol: tcp
|
||||
mode: ingress
|
||||
volumes:
|
||||
- type: volume
|
||||
source: gitea-data
|
||||
target: /data
|
||||
read_only: false
|
||||
environment:
|
||||
USER_UID: "{{ omni_compose_apps.gitea.account.uid | string }}"
|
||||
USER_GID: "{{ omni_compose_apps.gitea.account.uid | string }}"
|
||||
APP_NAME: ENP VCS
|
||||
RUN_MODE: prod
|
||||
DOMAIN: jupiter.net.enp.one
|
||||
ROOT_URL: https://{{ omni_compose_apps.gitea.published.host }}/
|
||||
DB_TYPE: sqlite3
|
||||
DISABLE_REGISTRATION: "true"
|
||||
deploy:
|
||||
replicas: 1
|
||||
53
resources/docker-compose/minecraft.yaml.j2
Normal file
@@ -0,0 +1,53 @@
|
||||
---
|
||||
version: "{{ omni_compose_version | string }}"
|
||||
|
||||
|
||||
networks:
|
||||
minecraft:
|
||||
name: minecraft
|
||||
driver: overlay
|
||||
ipam:
|
||||
driver: default
|
||||
config:
|
||||
- subnet: {{ omni_compose_apps.minecraft.networks.main }}
|
||||
|
||||
|
||||
volumes:
|
||||
minecraft-data:
|
||||
name: datastore{{ omni_compose_apps.minecraft.datastore }}
|
||||
driver: glusterfs
|
||||
|
||||
|
||||
services:
|
||||
server:
|
||||
image: itzg/minecraft-server:{{ omni_compose_apps.minecraft.versions.main }}
|
||||
hostname: minecraft
|
||||
networks:
|
||||
- minecraft
|
||||
ports:
|
||||
- published: {{ omni_compose_apps.minecraft.published.ports.25565 }}
|
||||
target: 25565
|
||||
protocol: tcp
|
||||
mode: ingress
|
||||
volumes:
|
||||
- type: volume
|
||||
source: minecraft-data
|
||||
target: /data
|
||||
read_only: false
|
||||
environment:
|
||||
EULA: "TRUE"
|
||||
TZ: America/New_York
|
||||
VERSION: {{ omni_compose_apps.minecraft.versions.server }}
|
||||
MAX_MEMORY: "8G"
|
||||
MOTD: "A home for buttery companions"
|
||||
MODE: survival
|
||||
OPS: ScifiGeek42
|
||||
WHITELIST: "ScifiGeek42,fantasycat256,CoffeePug,Snowdude21325,KaiserSJR,glutenfreebean"
|
||||
MAX_BUILD_HEIGHT: "512"
|
||||
SNOOPER_ENABLED: "false"
|
||||
ICON: https://cdn.enp.one/img/logos/e-w-sm.png
|
||||
ENABLE_RCON: "false"
|
||||
UID: "{{ omni_compose_apps.minecraft.account.uid | string }}"
|
||||
GID: "{{ omni_compose_apps.minecraft.account.uid | string }}"
|
||||
deploy:
|
||||
replicas: 1
|
||||
90
resources/docker-compose/plex.yaml.j2
Normal file
@@ -0,0 +1,90 @@
|
||||
---
|
||||
version: "{{ omni_compose_version | string }}"
|
||||
|
||||
|
||||
networks:
|
||||
plex:
|
||||
name: plex
|
||||
driver: overlay
|
||||
ipam:
|
||||
driver: default
|
||||
config:
|
||||
- subnet: {{ omni_compose_apps.plex.networks.main }}
|
||||
|
||||
|
||||
volumes:
|
||||
plex-config:
|
||||
name: datastore{{ omni_compose_apps.plex.datastore }}
|
||||
driver: glusterfs
|
||||
plex-data:
|
||||
name: plex-data
|
||||
driver: local
|
||||
driver_opts:
|
||||
type: nfs
|
||||
o: "addr=plexistore.tre2.local,ro"
|
||||
device: ":/nfs/plex"
|
||||
plex-personal:
|
||||
name: datastore/media
|
||||
driver: glusterfs
|
||||
|
||||
|
||||
services:
|
||||
server:
|
||||
image: plexinc/pms-docker:{{ omni_compose_apps.plex.versions.default }}
|
||||
hostname: plex-media-server
|
||||
networks:
|
||||
- plex
|
||||
ports:
|
||||
- published: {{ omni_compose_apps.plex.published.ports.32400 }}
|
||||
target: 32400
|
||||
protocol: tcp
|
||||
mode: ingress
|
||||
- published: {{ omni_compose_apps.plex.published.ports.3005 }}
|
||||
target: 3005
|
||||
protocol: tcp
|
||||
mode: ingress
|
||||
- published: {{ omni_compose_apps.plex.published.ports.8324 }}
|
||||
target: 8324
|
||||
protocol: tcp
|
||||
mode: ingress
|
||||
- published: {{ omni_compose_apps.plex.published.ports.32469 }}
|
||||
target: 32469
|
||||
protocol: tcp
|
||||
mode: ingress
|
||||
- published: {{ omni_compose_apps.plex.published.ports.1900 }}
|
||||
target: 1900
|
||||
protocol: udp
|
||||
mode: ingress
|
||||
- published: {{ omni_compose_apps.plex.published.ports.32410 }}
|
||||
target: 32410
|
||||
protocol: udp
|
||||
mode: ingress
|
||||
- published: {{ omni_compose_apps.plex.published.ports.32413 }}
|
||||
target: 32413
|
||||
protocol: udp
|
||||
mode: ingress
|
||||
- published: {{ omni_compose_apps.plex.published.ports.32414 }}
|
||||
target: 32414
|
||||
protocol: udp
|
||||
mode: ingress
|
||||
volumes:
|
||||
- type: volume
|
||||
source: plex-config
|
||||
target: /config
|
||||
read_only: false
|
||||
- type: volume
|
||||
source: plex-data
|
||||
target: /data
|
||||
read_only: true
|
||||
- type: volume
|
||||
source: plex-personal
|
||||
target: /personal
|
||||
read_only: false
|
||||
environment:
|
||||
TZ: "America/New_York"
|
||||
ALLOWED_NETWORKS: 10.42.100.0/24,10.42.101.0/24
|
||||
PLEX_UID: "{{ omni_compose_apps.plex.account.uid }}"
|
||||
PLEX_GID: "{{ omni_compose_apps.plex.account.uid }}"
|
||||
ADVERTISE_IP: "http://10.42.101.10:32400/"
|
||||
deploy:
|
||||
replicas: 1
|
||||
138
resources/docker-compose/scipio.yaml.j2
Normal file
@@ -0,0 +1,138 @@
|
||||
---
|
||||
version: "{{ omni_compose_version | string }}"
|
||||
|
||||
|
||||
x-global-env: &globalenv
|
||||
SCIPIO_SECRET_KEY: {{ omni_compose_app_secrets.scipio.application_key }}
|
||||
SCIPIO_DB_BACKEND: MARIA
|
||||
SCIPIO_DB_HOST: database
|
||||
SCIPIO_DB_PORT: "3306"
|
||||
SCIPIO_DB_USERNAME: root
|
||||
SCIPIO_DB_PASSWORD: {{ omni_compose_app_secrets.scipio.database_password }}
|
||||
SCIPIO_DB_SCHEMA: scipio
|
||||
SCIPIO_LOG_LEVEL: debug
|
||||
SCIPIO_LOG_RETENTION: "864000"
|
||||
SCIPIO_LOG_BACKEND: redis
|
||||
SCIPIO_LOG_REDIS_SCHEMA: "0"
|
||||
SCIPIO_LOG_REDIS_HOSTNAME: cache
|
||||
SCIPIO_PHANTOM_FEED: https://blog.tipranks.com/feed/
|
||||
SCIPIO_PHANTOM_HANDLER: tipranks
|
||||
SCIPIO_EXECUTOR_HANDLER: hologram
|
||||
SCIPIO_THRESHOLD_MIN_PROJECTED_RETURN_TO_BUY: "75"
|
||||
|
||||
|
||||
networks:
|
||||
scipio:
|
||||
name: scipio
|
||||
driver: overlay
|
||||
ipam:
|
||||
driver: default
|
||||
config:
|
||||
- subnet: {{ omni_compose_apps.scipio.networks.main }}
|
||||
|
||||
|
||||
volumes:
|
||||
scipio:
|
||||
name: datastore{{ omni_compose_apps.scipio.datastore }}
|
||||
driver: glusterfs
|
||||
|
||||
|
||||
services:
|
||||
database:
|
||||
image: mariadb:{{ omni_compose_apps.scipio.versions.database | default(omni_compose_apps.scipio.versions.default) }}
|
||||
hostname: scipio-database
|
||||
networks:
|
||||
- scipio
|
||||
ports:
|
||||
- published: {{ omni_compose_apps.scipio.published.ports.3306 }}
|
||||
target: 3306
|
||||
protocol: tcp
|
||||
mode: ingress
|
||||
volumes:
|
||||
- type: volume
|
||||
source: scipio
|
||||
target: /var/lib/mysql
|
||||
read_only: false
|
||||
environment:
|
||||
MYSQL_ROOT_PASSWORD: {{ omni_compose_app_secrets.scipio.database_password }}
|
||||
MYSQL_DATABASE: scipio
|
||||
deploy:
|
||||
replicas: 1
|
||||
|
||||
cache:
|
||||
image: redis:{{ omni_compose_apps.scipio.versions.cache | default(omni_compose_apps.scipio.versions.default) }}
|
||||
hostname: scipio-cache
|
||||
networks:
|
||||
- scipio
|
||||
deploy:
|
||||
replicas: 1
|
||||
|
||||
api:
|
||||
image: scipio:{{ omni_compose_apps.scipio.versions.api | default(omni_compose_apps.scipio.versions.default) }}
|
||||
hostname: scipio-api
|
||||
depends_on:
|
||||
- database
|
||||
- cache
|
||||
networks:
|
||||
- scipio
|
||||
ports:
|
||||
- published: {{ omni_compose_apps.scipio.published.ports.8080 }}
|
||||
target: 8080
|
||||
protocol: tcp
|
||||
mode: ingress
|
||||
environment:
|
||||
<<: *globalenv
|
||||
SCIPIO_LOG_SOURCE: api
|
||||
command: --api
|
||||
deploy:
|
||||
replicas: 1
|
||||
|
||||
phantom:
|
||||
image: scipio:{{ omni_compose_apps.scipio.versions.phantom | default(omni_compose_apps.scipio.versions.default) }}
|
||||
hostname: scipio-phantom
|
||||
depends_on:
|
||||
- database
|
||||
- cache
|
||||
networks:
|
||||
- scipio
|
||||
environment:
|
||||
<<: *globalenv
|
||||
SCIPIO_INTERVAL: "10"
|
||||
SCIPIO_LOG_SOURCE: phantom
|
||||
command: --phantom
|
||||
deploy:
|
||||
replicas: 1
|
||||
|
||||
executor:
|
||||
image: scipio:{{ omni_compose_apps.scipio.versions.executor | default(omni_compose_apps.scipio.versions.default) }}
|
||||
hostname: scipio-executor
|
||||
depends_on:
|
||||
- database
|
||||
- cache
|
||||
- phantom
|
||||
networks:
|
||||
- scipio
|
||||
environment:
|
||||
<<: *globalenv
|
||||
SCIPIO_INTERVAL: "5"
|
||||
SCIPIO_LOG_SOURCE: executor
|
||||
command: --executor
|
||||
deploy:
|
||||
replicas: 1
|
||||
|
||||
falcon:
|
||||
image: scipio:{{ omni_compose_apps.scipio.versions.falcon | default(omni_compose_apps.scipio.versions.default) }}
|
||||
hostname: scipio-falcon
|
||||
depends_on:
|
||||
- database
|
||||
- cache
|
||||
- executor
|
||||
networks:
|
||||
- scipio
|
||||
environment:
|
||||
<<: *globalenv
|
||||
SCIPIO_INTERVAL: "60"
|
||||
SCIPIO_LOG_SOURCE: falcon
|
||||
command: --falcon
|
||||
deploy:
|
||||
replicas: 1
|
||||
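The x-global-env: &globalenv block plus the <<: *globalenv merge key used in the services above is plain YAML anchoring: each service pulls in the full shared environment and then layers its own keys on top. A stripped-down sketch of the same pattern:

# Minimal illustration of the anchor/merge pattern used above
x-global-env: &globalenv
  SCIPIO_DB_HOST: database
  SCIPIO_LOG_LEVEL: debug

services:
  api:
    environment:
      <<: *globalenv            # merge in every shared variable
      SCIPIO_LOG_SOURCE: api    # then add or override service-specific ones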
68
resources/docker-compose/unifi.yaml.j2
Normal file
@@ -0,0 +1,68 @@
|
||||
---
|
||||
version: "3.7"
|
||||
|
||||
|
||||
networks:
|
||||
unifi:
|
||||
name: unifi
|
||||
driver: overlay
|
||||
ipam:
|
||||
driver: default
|
||||
config:
|
||||
- subnet: {{ omni_compose_apps.unifi.networks.main }}
|
||||
|
||||
|
||||
volumes:
|
||||
unifi-data:
|
||||
name: datastore{{ omni_compose_apps.unifi.datastore }}
|
||||
driver: glusterfs
|
||||
|
||||
|
||||
services:
|
||||
wlc:
|
||||
image: jacobalberty/unifi:{{ omni_compose_apps.unifi.versions.default }}
|
||||
hostname: en1-unifi-wlc
|
||||
init: true
|
||||
networks:
|
||||
- unifi
|
||||
ports:
|
||||
- published: {{ omni_compose_apps.unifi.published.ports.8080 }}
|
||||
target: 8080
|
||||
protocol: tcp
|
||||
mode: ingress
|
||||
- published: {{ omni_compose_apps.unifi.published.ports.8443 }}
|
||||
target: 8443
|
||||
protocol: tcp
|
||||
mode: ingress
|
||||
- published: {{ omni_compose_apps.unifi.published.ports.8843 }}
|
||||
target: 8843
|
||||
protocol: tcp
|
||||
mode: ingress
|
||||
- published: {{ omni_compose_apps.unifi.published.ports.8880 }}
|
||||
target: 8880
|
||||
protocol: tcp
|
||||
mode: ingress
|
||||
- published: {{ omni_compose_apps.unifi.published.ports.3478 }}
|
||||
target: 3478
|
||||
protocol: udp
|
||||
mode: ingress
|
||||
- published: {{ omni_compose_apps.unifi.published.ports.6789 }}
|
||||
target: 6789
|
||||
protocol: tcp
|
||||
mode: ingress
|
||||
- published: {{ omni_compose_apps.unifi.published.ports.10001 }}
|
||||
target: 10001
|
||||
protocol: udp
|
||||
mode: ingress
|
||||
volumes:
|
||||
- type: volume
|
||||
source: unifi-data
|
||||
target: /unifi
|
||||
read_only: false
|
||||
environment:
|
||||
RUNAS_UID0: "false"
|
||||
UNIFI_UID: "{{ omni_compose_apps.unifi.account.uid }}"
|
||||
UNIFI_GID: "{{ omni_compose_apps.unifi.account.uid }}"
|
||||
TZ: "America/New_York"
|
||||
deploy:
|
||||
replicas: 1
|
||||
7
resources/motd.j2
Normal file
@@ -0,0 +1,7 @@
|
||||
|
||||
//////////// //// //// ///////////
|
||||
//// ////// //// //// ////
|
||||
//////// //// /// //// ///////////
|
||||
//// //// ////// ////
|
||||
//////////// //// //// {{ omni_description | default('Omni Network System') }}
|
||||
_______________________________{{ omni_description | default('Omni Network System') | length * '\\' }}\
|
||||
9
resources/networkd/netdev.j2
Normal file
@@ -0,0 +1,9 @@
|
||||
# ANSIBLE MANAGED FILE - DO NOT EDIT
|
||||
[NetDev]
|
||||
Name={{ item.0.key }}
|
||||
Kind=vlan
|
||||
|
||||
[VLAN]
|
||||
Id={{ item.1 }}
|
||||
|
||||
# EOF
|
||||
27
resources/networkd/network.j2
Normal file
@@ -0,0 +1,27 @@
|
||||
# ANSIBLE MANAGED FILE - DO NOT EDIT
|
||||
[Match]
|
||||
Name={{ item.key }}
|
||||
|
||||
[Network]
|
||||
DHCP={{ 'Yes' if item.value['dhcp'] | default(false) == true else 'No' }}
|
||||
IPv6AcceptRA={{ 'Yes' if item.value['dhcp6'] | default(false) == true else 'No' }}
|
||||
{% if item.value['addresses'] is defined %}
|
||||
{% for ip_addr in item.value['addresses'] %}
|
||||
Address={{ ip_addr }}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% if item.value['dns'] is defined %}
|
||||
{% for dns_server in item.value['dns'] %}
|
||||
DNS={{ dns_server }}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% if item.value['gateway'] is defined %}
|
||||
Gateway={{ item.value['gateway'] }}
|
||||
{% endif %}
|
||||
{% if item.value['vlans'] is defined %}
|
||||
{% for vlan_tag in item.value['vlans'] %}
|
||||
VLAN={{ item.key }}.{{ vlan_tag }}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
|
||||
# EOF
|
||||
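The netdev.j2 and network.j2 templates above iterate over a per-host interface mapping (the item.key / item.value pairs). The variable name and values below are hypothetical, but the nested keys are exactly the ones the templates look up (dhcp, dhcp6, addresses, dns, gateway, vlans):

# Hypothetical host_vars structure implied by the template lookups above
interfaces:
  eno1:
    dhcp: false
    addresses:
      - 10.42.101.10/24
    gateway: 10.42.101.1
    dns:
      - 10.42.101.1
    vlans:
      - 101
      - 102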
31
resources/nginx/bitwarden.nginx.conf.j2
Normal file
@@ -0,0 +1,31 @@
|
||||
# Ansible managed file
|
||||
# DO NOT MANUALLY EDIT
|
||||
#
|
||||
server {
|
||||
server_name {{ omni_compose_apps.bitwarden.published.host }};
|
||||
listen 443 ssl;
|
||||
root /usr/share/nginx/html;
|
||||
|
||||
location / {
|
||||
proxy_pass http://localhost:{{ omni_compose_apps.bitwarden.published.ports.8080 }}/;
|
||||
proxy_set_header Host $host;
|
||||
}
|
||||
|
||||
ssl_certificate /etc/letsencrypt/live/{{ omni_compose_apps.bitwarden.published.host }}/fullchain.pem;
|
||||
ssl_certificate_key /etc/letsencrypt/live/{{ omni_compose_apps.bitwarden.published.host }}/privkey.pem;
|
||||
include /etc/letsencrypt/options-ssl-nginx.conf;
|
||||
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;
|
||||
}
|
||||
|
||||
server {
|
||||
server_name {{ omni_compose_apps.bitwarden.published.host }};
|
||||
listen 80;
|
||||
root /usr/share/nginx/html;
|
||||
|
||||
if ($host = {{ omni_compose_apps.bitwarden.published.host }}) {
|
||||
return 301 https://$host$request_uri;
|
||||
}
|
||||
return 404;
|
||||
}
|
||||
#
|
||||
# EOF
|
||||
37
resources/nginx/nginx.conf
Normal file
@@ -0,0 +1,37 @@
|
||||
# Ansible managed file
|
||||
# DO NOT MANUALLY EDIT
|
||||
#
|
||||
user nginx;
|
||||
worker_processes auto;
|
||||
error_log /var/log/nginx/error.log;
|
||||
pid /run/nginx.pid;
|
||||
|
||||
# Load dynamic modules. See /usr/share/doc/nginx/README.dynamic.
|
||||
include /usr/share/nginx/modules/*.conf;
|
||||
|
||||
events {
|
||||
worker_connections 1024;
|
||||
}
|
||||
|
||||
http {
|
||||
log_format main '$time_local $remote_addr[$status] - $remote_addr($remote_user) - $body_bytes_sent - "$request" "$http_referer" "$http_user_agent" "$http_x_forwarded_for"';
|
||||
|
||||
access_log /var/log/nginx/access.log main;
|
||||
|
||||
sendfile on;
|
||||
tcp_nopush on;
|
||||
tcp_nodelay on;
|
||||
keepalive_timeout 65;
|
||||
types_hash_max_size 2048;
|
||||
|
||||
include /etc/nginx/mime.types;
|
||||
default_type application/octet-stream;
|
||||
|
||||
# Load modular configuration files from the /etc/nginx/conf.d directory.
|
||||
# See http://nginx.org/en/docs/ngx_core_module.html#include
|
||||
# for more information.
|
||||
include /etc/nginx/conf.d/*.conf;
|
||||
|
||||
}
|
||||
#
|
||||
# EOF
|
||||
@@ -1,45 +0,0 @@
|
||||
---
|
||||
- name: Configure firewall for NFS
|
||||
become: true
|
||||
firewalld:
|
||||
immediate: yes
|
||||
permanent: yes
|
||||
service: nfs
|
||||
state: enabled
|
||||
zone: public
|
||||
|
||||
- name: Install NFS
|
||||
become: true
|
||||
when: ansible_distribution == "CentOS"
|
||||
yum:
|
||||
name: nfs-utils
|
||||
state: latest
|
||||
|
||||
- name: Enable NFS server
|
||||
become: true
|
||||
service:
|
||||
name: nfs-server
|
||||
enabled: true
|
||||
state: started
|
||||
|
||||
- name: Create exports directory
|
||||
become: true
|
||||
file:
|
||||
path: /share
|
||||
state: directory
|
||||
|
||||
- name: Symlink shares to exports directory
|
||||
become: true
|
||||
file:
|
||||
dest: /share/{{ item.name }}
|
||||
src: "{{ item.path }}"
|
||||
state: link
|
||||
|
||||
- name: Modify /etc/exports
|
||||
become: true
|
||||
lineinfile:
|
||||
path: /etc/exports
|
||||
backup: yes
|
||||
create: true
|
||||
state: present
|
||||
line: "/share/{{ item.name }} {{ item.access }}({{ item.permissions }})"
|
||||
@@ -1,31 +0,0 @@
|
||||
- name: Check system compatibility
  when: ansible_distribution != "CentOS" and ansible_distribution != "Red Hat Enterprise Linux"
  debug:
    msg: "Hypervisor deployment is only supported on CentOS and RHEL"

- name: End the play on unsupported systems
  when: ansible_distribution != "CentOS" and ansible_distribution != "Red Hat Enterprise Linux"
  meta: end_play
|
||||
|
||||
- name: Temporarily disable IUS and EPEL repositories
|
||||
become: true
|
||||
command: mv /etc/yum.repos.d/{{ item }}.repo /etc/yum.repos.d/{{ item }}.repo.bak
|
||||
with_items:
|
||||
- ius
|
||||
- epel
|
||||
|
||||
- name: Install OVirt repository
|
||||
become: true
|
||||
yum:
|
||||
name: http://resources.ovirt.org/pub/yum-repo/ovirt-release42.rpm
|
||||
state: latest
|
||||
|
||||
- name: Install OVirt Engine
|
||||
become: true
|
||||
yum:
|
||||
name: ovirt-engine
|
||||
state: latest
|
||||
|
||||
- name: Re-enable IUS and EPEL repositories
|
||||
become: true
|
||||
command: mv /etc/yum.repos.d/{{ item }}.repo.bak /etc/yum.repos.d/{{ item }}.repo
|
||||
with_items:
|
||||
- ius
|
||||
- epel
|
||||
@@ -1,65 +0,0 @@
|
||||
---
|
||||
# The dracut patch is an issue uniquely bound to the fact that I'm using several
|
||||
# old-as-shit hardware RAID cards. Specifically the Dell PERC H200 and the Dell PERC
|
||||
# H310, both of which had their hardware drivers dropped in Cent8 (despite the drivers
|
||||
# being included in the upstream fedora kernel, but whatever). OS installation and the
|
||||
# process in this set of tasks is based off of this blog post:
|
||||
# https://www.centos.org/forums/viewtopic.php?t=71862#p302447
|
||||
#
|
||||
# TODO: Host the RPMs locally. The internet may never forget, but it's also never there
|
||||
# when you need it
|
||||
|
||||
- name: Determine dracut version
|
||||
shell:
|
||||
cmd: rpm -qa | grep dracut-[0-9]
|
||||
warn: false
|
||||
register: dracut_version_check
|
||||
|
||||
- name: Install patched version of dracut
|
||||
when: dracut_version_check.stdout != "dracut-049-13.git20190614.p1.el8_0.elrepo.x86_64"
|
||||
block:
|
||||
- name: Create temporary download directory
|
||||
file:
|
||||
path: /tmp/dracut-patch
|
||||
state: directory
|
||||
|
||||
- name: Download patched dracut tool RPMs
|
||||
get_url:
|
||||
url: "{{ item.source }}"
|
||||
dest: /tmp/dracut-patch/{{ item.dest }}
|
||||
loop:
|
||||
- source: http://elrepo.org/people/akemi/testing/el8/dracut/dracut-049-13.git20190614.p1.el8_0.elrepo.x86_64.rpm
|
||||
dest: dracut.rpm
|
||||
- source: http://elrepo.org/people/akemi/testing/el8/dracut/dracut-caps-049-13.git20190614.p1.el8_0.elrepo.x86_64.rpm
|
||||
dest: dracut-caps.rpm
|
||||
- source: http://elrepo.org/people/akemi/testing/el8/dracut/dracut-config-generic-049-13.git20190614.p1.el8_0.elrepo.x86_64.rpm
|
||||
dest: dracut-config-generic.rpm
|
||||
- source: http://elrepo.org/people/akemi/testing/el8/dracut/dracut-config-rescue-049-13.git20190614.p1.el8_0.elrepo.x86_64.rpm
|
||||
dest: dracut-config-rescue.rpm
|
||||
- source: http://elrepo.org/people/akemi/testing/el8/dracut/dracut-live-049-13.git20190614.p1.el8_0.elrepo.x86_64.rpm
|
||||
dest: dracut-live.rpm
|
||||
- source: http://elrepo.org/people/akemi/testing/el8/dracut/dracut-network-049-13.git20190614.p1.el8_0.elrepo.x86_64.rpm
|
||||
dest: dracut-network.rpm
|
||||
- source: http://elrepo.org/people/akemi/testing/el8/dracut/dracut-squash-049-13.git20190614.p1.el8_0.elrepo.x86_64.rpm
|
||||
dest: dracut-squash.rpm
|
||||
- source: http://elrepo.org/people/akemi/testing/el8/dracut/dracut-tools-049-13.git20190614.p1.el8_0.elrepo.x86_64.rpm
|
||||
dest: dracut-tools.rpm
|
||||
|
||||
- name: Install patched dracut toolchain
|
||||
become: true
|
||||
dnf:
|
||||
state: latest
|
||||
name:
|
||||
- /tmp/dracut-patch/dracut.rpm
|
||||
- /tmp/dracut-patch/dracut-caps.rpm
|
||||
- /tmp/dracut-patch/dracut-config-generic.rpm
|
||||
- /tmp/dracut-patch/dracut-config-rescue.rpm
|
||||
- /tmp/dracut-patch/dracut-live.rpm
|
||||
- /tmp/dracut-patch/dracut-network.rpm
|
||||
- /tmp/dracut-patch/dracut-squash.rpm
|
||||
- /tmp/dracut-patch/dracut-tools.rpm
|
||||
|
||||
- name: Remove temporary download directory
|
||||
file:
|
||||
path: /tmp/dracut-patch
|
||||
state: absent
|
||||
@@ -1,13 +0,0 @@
|
||||
---
|
||||
- name: Enable Extra Packages for Enterprise Linux
|
||||
become: true
|
||||
dnf:
|
||||
state: latest
|
||||
name: https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm
|
||||
|
||||
- name: Enable the power tools repository
|
||||
become: true
|
||||
lineinfile:
|
||||
path: /etc/yum.repos.d/CentOS-PowerTools.repo
|
||||
regexp: "enabled=(0|1)"
|
||||
line: "enabled=1"
|
||||
@@ -1,7 +0,0 @@
|
||||
---
|
||||
- name: Clean DNF cache
|
||||
become: true
|
||||
when: ansible_distribution == "Fedora" or ansible_distribution == "CentOS"
|
||||
shell:
|
||||
cmd: dnf clean all
|
||||
warn: false
|
||||
@@ -1,23 +0,0 @@
|
||||
---
|
||||
- import_tasks: centos-repos.yml
|
||||
when: ansible_distribution == "CentOS"
|
||||
|
||||
- import_tasks: clean.yml
|
||||
when: clean | default(false) == true
|
||||
|
||||
- import_tasks: update.yml
|
||||
when: update | default(false) == true
|
||||
|
||||
- name: Install packages on Fedora
|
||||
become: true
|
||||
when: ansible_distribution == "Fedora"
|
||||
dnf:
|
||||
state: latest
|
||||
name: "{{ packages_global + packages_fedora }}"
|
||||
|
||||
- name: Install packages on CentOS
|
||||
become: true
|
||||
when: ansible_distribution == "CentOS"
|
||||
dnf:
|
||||
state: latest
|
||||
name: "{{ packages_global + packages_centos }}"
|
||||
@@ -1,16 +0,0 @@
|
||||
---
|
||||
- import_tasks: centos-dracut.yml
|
||||
when: ansible_distribution == "CentOS"
|
||||
|
||||
- name: Upgrade Fedora and CentOS packages
|
||||
when: ansible_distribution == "CentOS" or ansible_distribution == "Fedora"
|
||||
become: true
|
||||
dnf:
|
||||
state: latest
|
||||
name: "*"
|
||||
exclude: "{{ ','.join(exclude | default(['kernel*'])) }}"
|
||||
|
||||
# - name: Upgrade APT packages
|
||||
# when: ansible_distribution == "Debian" or ansible_distribution == "Ubuntu"
|
||||
# become: true
|
||||
# apt:
|
||||
@@ -1,35 +0,0 @@
|
||||
---
|
||||
packages_global:
|
||||
- automake
|
||||
- cmake
|
||||
- curl
|
||||
- gcc
|
||||
- gcc-c++
|
||||
- git
|
||||
- make
|
||||
- nano
|
||||
- openssl-devel
|
||||
- systemd-devel
|
||||
- unzip
|
||||
- vim
|
||||
- vim-minimal
|
||||
|
||||
packages_fedora:
|
||||
- libselinux-python
|
||||
- git-lfs
|
||||
- readline-devel
|
||||
- policycoreutils-python
|
||||
- python-devel
|
||||
- python-virtualenv
|
||||
- python3-devel
|
||||
|
||||
packages_centos:
|
||||
- bind-utils
|
||||
- bash-completion
|
||||
- nc
|
||||
- nfs-utils
|
||||
- python3
|
||||
- python3-pip
|
||||
- python3-setuptools
|
||||
- python3-virtualenv
|
||||
- wget
|
||||
@@ -1,7 +0,0 @@
|
||||
|
||||
//////////// //// //// ///////////
|
||||
//// ////// //// //// ////
|
||||
//////// //// /// //// ///////////
|
||||
//// //// ////// ////
|
||||
//////////// //// //// {{ description | default('Omni Network System') }}
|
||||
_______________________________{{ description | default('Omni Network System') | length * '\\' }}\
|
||||
61
tasks/centos-8-kernelplus.yml
Normal file
@@ -0,0 +1,61 @@
|
||||
---
|
||||
# This is a workaround for Cent8 removing drivers from the kernel that are required for
|
||||
# my RAID cards to work. Kernel-Plus includes the drivers, thus one of the first things
|
||||
# we need to do is to replace the kernel before doing an update.
|
||||
- name: Replace default kernel with kernel-plus on CentOS 8
|
||||
when: ansible_distribution == "CentOS" and ansible_distribution_major_version == "8"
|
||||
become: true
|
||||
block:
|
||||
- name: Disable kernel installation from base repository
|
||||
lineinfile:
|
||||
path: /etc/yum.repos.d/CentOS-Base.repo
|
||||
line: exclude=kernel*
|
||||
|
||||
- name: Enable Centos-plus repository
|
||||
lineinfile:
|
||||
path: /etc/yum.repos.d/CentOS-centosplus.repo
|
||||
regexp: "#?enabled=(0|1)"
|
||||
line: enabled=1
|
||||
|
||||
- name: Enable kernel installation from plus repository
|
||||
lineinfile:
|
||||
path: /etc/yum.repos.d/CentOS-centosplus.repo
|
||||
line: includepkgs=kernel*
|
||||
|
||||
# Note that the order of the next four tasks is very specific and intentional
|
||||
# See this wiki page: https://plone.lucidsolutions.co.nz/linux/centos/7/install-centos-plus-kernel-kernel-plus/view
|
||||
- name: Install kernel-plus
|
||||
dnf:
|
||||
state: "{{ _runtime_update_state }}"
|
||||
name:
|
||||
- kernel-plus
|
||||
- kernel-plus-devel
|
||||
register: _dnf_kernel_plus
|
||||
|
||||
- name: Uninstall kernel-tools
|
||||
dnf:
|
||||
name:
|
||||
- kernel-tools
|
||||
- kernel-tools-libs
|
||||
state: absent
|
||||
|
||||
- name: Install kernel-plus-tools
|
||||
dnf:
|
||||
state: "{{ _runtime_update_state }}"
|
||||
name:
|
||||
- kernel-plus-tools
|
||||
- kernel-plus-tools-libs
|
||||
|
||||
- name: Reboot into new kernel
|
||||
when: _dnf_kernel_plus.changed is true and "centos.plus" not in ansible_kernel
|
||||
reboot:
|
||||
reboot_timeout: 3600
|
||||
|
||||
- name: Uninstall kernel
|
||||
dnf:
|
||||
state: absent
|
||||
name:
|
||||
- kernel
|
||||
- kernel-devel
|
||||
- kernel-core
|
||||
- kernel-modules
|
||||
@@ -1,9 +0,0 @@
|
||||
---
|
||||
- name: Install python bindings using YUM
|
||||
become: true
|
||||
yum:
|
||||
state: latest
|
||||
name:
|
||||
- libselinux-python
|
||||
- policycoreutils-python
|
||||
- python-firewall
|
||||
@@ -1,8 +0,0 @@
|
||||
---
|
||||
- name: Install systemd-networkd
|
||||
become: true
|
||||
yum:
|
||||
state: latest
|
||||
name:
|
||||
- systemd-resolved
|
||||
- systemd-networkd
|
||||
@@ -1,9 +0,0 @@
|
||||
---
|
||||
- name: Install global packages using YUM
|
||||
become: true
|
||||
yum:
|
||||
state: latest
|
||||
name: "{{ item }}"
|
||||
with_items:
|
||||
- "{{ packages_global }}"
|
||||
- "{{ packages_yum }}"
|
||||
@@ -1,37 +0,0 @@
|
||||
---
|
||||
- name: Enable Extra Packages for Enterprise Linux
|
||||
become: true
|
||||
yum_repository:
|
||||
name: epel
|
||||
description: Extra Packages for Enterprise Linux
|
||||
baseurl: https://download.fedoraproject.org/pub/epel/$releasever/$basearch/
|
||||
|
||||
- name: Install Extra Packages for Enterprise Linux GPG key
|
||||
become: true
|
||||
rpm_key:
|
||||
state: present
|
||||
key: https://archive.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-7
|
||||
|
||||
- name: Enable Inline with Upstream Stable
|
||||
become: true
|
||||
yum:
|
||||
state: latest
|
||||
name: https://centos7.iuscommunity.org/ius-release.rpm
|
||||
|
||||
- name: Disable yum subscription-manager
|
||||
become: true
|
||||
lineinfile:
|
||||
regex: enabled=1
|
||||
line: enabled=0
|
||||
path: /etc/yum/pluginconf.d/subscription-manager.conf
|
||||
create: yes
|
||||
state: present
|
||||
|
||||
- name: Disable yum repo report upload
|
||||
become: true
|
||||
lineinfile:
|
||||
regex: enabled=1
|
||||
line: enabled=0
|
||||
path: /etc/yum/pluginconf.d/enabled_repos_upload.conf
|
||||
create: yes
|
||||
state: present
|
||||
24
tasks/docker/build.yml
Normal file
@@ -0,0 +1,24 @@
|
||||
---
|
||||
- name: Clone repositories
|
||||
when: item.value.build is defined
|
||||
git:
|
||||
repo: "{{ item.value.build.repository }}"
|
||||
dest: /tmp/{{ item.key }}
|
||||
version: "{{ item.value.build.version }}"
|
||||
accept_hostkey: true
|
||||
loop: "{{ omni_compose_apps | dict2items }}"
|
||||
loop_control:
|
||||
label: "{{ item.key }}"
|
||||
|
||||
- name: Build image
|
||||
when: item.value.build is defined
|
||||
docker_image:
|
||||
source: build
|
||||
name: "{{ item.key }}"
|
||||
tag: "{{ item.value.build.version }}"
|
||||
build:
|
||||
path: /tmp/{{ item.key }}
|
||||
rm: true
|
||||
loop: "{{ omni_compose_apps | dict2items }}"
|
||||
loop_control:
|
||||
label: "{{ item.key }}"
|
||||
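Both tasks above skip any application without a build key, so only locally-built images are cloned and built. A hedged sketch of an entry that would be picked up (the repository URL and version are placeholders):

# Hypothetical app entry with a build key, matching the fields used above
omni_compose_apps:
  scipio:
    build:
      repository: git@vcs.example.com:omni/scipio.git
      version: "1.4.0"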
Some files were not shown because too many files have changed in this diff