From 8761944a683c72bca5e3a948d5f7413cbf583b66 Mon Sep 17 00:00:00 2001 From: Madhu Kanoor Date: Tue, 9 Jan 2024 20:38:15 -0500 Subject: [PATCH] feat: activations with webhooks --- poetry.lock | 142 ++++- pyproject.toml | 2 + src/aap_eda/api/filters/webhook.py | 29 + src/aap_eda/api/serializers/__init__.py | 4 + src/aap_eda/api/serializers/activation.py | 51 +- src/aap_eda/api/serializers/webhook.py | 89 +++ src/aap_eda/api/urls.py | 5 + src/aap_eda/api/views/__init__.py | 6 + src/aap_eda/api/views/activation.py | 23 + src/aap_eda/api/views/auth.py | 1 - src/aap_eda/api/views/external_webhook.py | 169 ++++++ src/aap_eda/api/views/webhook.py | 180 ++++++ src/aap_eda/core/enums.py | 1 + .../commands/create_initial_data.py | 6 + ...rmission_resource_type_webhook_and_more.py | 211 +++++++ src/aap_eda/core/models/__init__.py | 2 + src/aap_eda/core/models/activation.py | 2 + src/aap_eda/core/models/webhook.py | 125 ++++ src/aap_eda/core/validators.py | 48 ++ src/aap_eda/services/pg_notify.py | 100 ++++ src/aap_eda/settings/default.py | 18 + .../api/test_activation_with_webhook.py | 284 +++++++++ tests/integration/api/test_webhook.py | 547 ++++++++++++++++++ tools/docker/docker-compose-mac.yml | 29 +- tools/docker/docker-compose-stage.yaml | 26 +- 25 files changed, 2094 insertions(+), 6 deletions(-) create mode 100644 src/aap_eda/api/filters/webhook.py create mode 100644 src/aap_eda/api/serializers/webhook.py create mode 100644 src/aap_eda/api/views/external_webhook.py create mode 100644 src/aap_eda/api/views/webhook.py create mode 100644 src/aap_eda/core/migrations/0026_alter_permission_resource_type_webhook_and_more.py create mode 100644 src/aap_eda/core/models/webhook.py create mode 100644 src/aap_eda/services/pg_notify.py create mode 100644 tests/integration/api/test_activation_with_webhook.py create mode 100644 tests/integration/api/test_webhook.py diff --git a/poetry.lock b/poetry.lock index 9f83f3f38..418fa87a0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1580,6 
+1580,29 @@ files = [ [package.extras] test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] +[[package]] +name = "psycopg" +version = "3.1.18" +description = "PostgreSQL database adapter for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "psycopg-3.1.18-py3-none-any.whl", hash = "sha256:4d5a0a5a8590906daa58ebd5f3cfc34091377354a1acced269dd10faf55da60e"}, + {file = "psycopg-3.1.18.tar.gz", hash = "sha256:31144d3fb4c17d78094d9e579826f047d4af1da6a10427d91dfcfb6ecdf6f12b"}, +] + +[package.dependencies] +typing-extensions = ">=4.1" +tzdata = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +binary = ["psycopg-binary (==3.1.18)"] +c = ["psycopg-c (==3.1.18)"] +dev = ["black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.4.1)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] +docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] +pool = ["psycopg-pool"] +test = ["anyio (>=3.6.2,<4.0)", "mypy (>=1.4.1)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] + [[package]] name = "psycopg2" version = "2.9.6" @@ -2630,6 +2653,123 @@ files = [ [package.dependencies] lxml = ">=3.8" +[[package]] +name = "xxhash" +version = "3.4.1" +description = "Python binding for xxHash" +optional = false +python-versions = ">=3.7" +files = [ + {file = "xxhash-3.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:91dbfa55346ad3e18e738742236554531a621042e419b70ad8f3c1d9c7a16e7f"}, + {file = "xxhash-3.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:665a65c2a48a72068fcc4d21721510df5f51f1142541c890491afc80451636d2"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb11628470a6004dc71a09fe90c2f459ff03d611376c1debeec2d648f44cb693"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:5bef2a7dc7b4f4beb45a1edbba9b9194c60a43a89598a87f1a0226d183764189"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0f7b2d547d72c7eda7aa817acf8791f0146b12b9eba1d4432c531fb0352228"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00f2fdef6b41c9db3d2fc0e7f94cb3db86693e5c45d6de09625caad9a469635b"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23cfd9ca09acaf07a43e5a695143d9a21bf00f5b49b15c07d5388cadf1f9ce11"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6a9ff50a3cf88355ca4731682c168049af1ca222d1d2925ef7119c1a78e95b3b"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f1d7c69a1e9ca5faa75546fdd267f214f63f52f12692f9b3a2f6467c9e67d5e7"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:672b273040d5d5a6864a36287f3514efcd1d4b1b6a7480f294c4b1d1ee1b8de0"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4178f78d70e88f1c4a89ff1ffe9f43147185930bb962ee3979dba15f2b1cc799"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9804b9eb254d4b8cc83ab5a2002128f7d631dd427aa873c8727dba7f1f0d1c2b"}, + {file = "xxhash-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c09c49473212d9c87261d22c74370457cfff5db2ddfc7fd1e35c80c31a8c14ce"}, + {file = "xxhash-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:ebbb1616435b4a194ce3466d7247df23499475c7ed4eb2681a1fa42ff766aff6"}, + {file = "xxhash-3.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:25dc66be3db54f8a2d136f695b00cfe88018e59ccff0f3b8f545869f376a8a46"}, + {file = "xxhash-3.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58c49083801885273e262c0f5bbeac23e520564b8357fbb18fb94ff09d3d3ea5"}, + {file = "xxhash-3.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:b526015a973bfbe81e804a586b703f163861da36d186627e27524f5427b0d520"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36ad4457644c91a966f6fe137d7467636bdc51a6ce10a1d04f365c70d6a16d7e"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:248d3e83d119770f96003271fe41e049dd4ae52da2feb8f832b7a20e791d2920"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2070b6d5bbef5ee031666cf21d4953c16e92c2f8a24a94b5c240f8995ba3b1d0"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2746035f518f0410915e247877f7df43ef3372bf36cfa52cc4bc33e85242641"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ba6181514681c2591840d5632fcf7356ab287d4aff1c8dea20f3c78097088"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aac5010869240e95f740de43cd6a05eae180c59edd182ad93bf12ee289484fa"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4cb11d8debab1626181633d184b2372aaa09825bde709bf927704ed72765bed1"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b29728cff2c12f3d9f1d940528ee83918d803c0567866e062683f300d1d2eff3"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a15cbf3a9c40672523bdb6ea97ff74b443406ba0ab9bca10ceccd9546414bd84"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e66df260fed01ed8ea790c2913271641c58481e807790d9fca8bfd5a3c13844"}, + {file = "xxhash-3.4.1-cp311-cp311-win32.whl", hash = "sha256:e867f68a8f381ea12858e6d67378c05359d3a53a888913b5f7d35fbf68939d5f"}, + {file = "xxhash-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:200a5a3ad9c7c0c02ed1484a1d838b63edcf92ff538770ea07456a3732c577f4"}, + {file = "xxhash-3.4.1-cp311-cp311-win_arm64.whl", 
hash = "sha256:1d03f1c0d16d24ea032e99f61c552cb2b77d502e545187338bea461fde253583"}, + {file = "xxhash-3.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c4bbba9b182697a52bc0c9f8ec0ba1acb914b4937cd4a877ad78a3b3eeabefb3"}, + {file = "xxhash-3.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9fd28a9da300e64e434cfc96567a8387d9a96e824a9be1452a1e7248b7763b78"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6066d88c9329ab230e18998daec53d819daeee99d003955c8db6fc4971b45ca3"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93805bc3233ad89abf51772f2ed3355097a5dc74e6080de19706fc447da99cd3"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64da57d5ed586ebb2ecdde1e997fa37c27fe32fe61a656b77fabbc58e6fbff6e"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a97322e9a7440bf3c9805cbaac090358b43f650516486746f7fa482672593df"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbe750d512982ee7d831838a5dee9e9848f3fb440e4734cca3f298228cc957a6"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fd79d4087727daf4d5b8afe594b37d611ab95dc8e29fe1a7517320794837eb7d"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:743612da4071ff9aa4d055f3f111ae5247342931dedb955268954ef7201a71ff"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b41edaf05734092f24f48c0958b3c6cbaaa5b7e024880692078c6b1f8247e2fc"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a90356ead70d715fe64c30cd0969072de1860e56b78adf7c69d954b43e29d9fa"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ac56eebb364e44c85e1d9e9cc5f6031d78a34f0092fea7fc80478139369a8b4a"}, + {file = 
"xxhash-3.4.1-cp312-cp312-win32.whl", hash = "sha256:911035345932a153c427107397c1518f8ce456f93c618dd1c5b54ebb22e73747"}, + {file = "xxhash-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:f31ce76489f8601cc7b8713201ce94b4bd7b7ce90ba3353dccce7e9e1fee71fa"}, + {file = "xxhash-3.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:b5beb1c6a72fdc7584102f42c4d9df232ee018ddf806e8c90906547dfb43b2da"}, + {file = "xxhash-3.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6d42b24d1496deb05dee5a24ed510b16de1d6c866c626c2beb11aebf3be278b9"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b685fab18876b14a8f94813fa2ca80cfb5ab6a85d31d5539b7cd749ce9e3624"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419ffe34c17ae2df019a4685e8d3934d46b2e0bbe46221ab40b7e04ed9f11137"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e041ce5714f95251a88670c114b748bca3bf80cc72400e9f23e6d0d59cf2681"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc860d887c5cb2f524899fb8338e1bb3d5789f75fac179101920d9afddef284b"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:312eba88ffe0a05e332e3a6f9788b73883752be63f8588a6dc1261a3eaaaf2b2"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e01226b6b6a1ffe4e6bd6d08cfcb3ca708b16f02eb06dd44f3c6e53285f03e4f"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9f3025a0d5d8cf406a9313cd0d5789c77433ba2004b1c75439b67678e5136537"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:6d3472fd4afef2a567d5f14411d94060099901cd8ce9788b22b8c6f13c606a93"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:43984c0a92f06cac434ad181f329a1445017c33807b7ae4f033878d860a4b0f2"}, + {file = 
"xxhash-3.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a55e0506fdb09640a82ec4f44171273eeabf6f371a4ec605633adb2837b5d9d5"}, + {file = "xxhash-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:faec30437919555b039a8bdbaba49c013043e8f76c999670aef146d33e05b3a0"}, + {file = "xxhash-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:c9e1b646af61f1fc7083bb7b40536be944f1ac67ef5e360bca2d73430186971a"}, + {file = "xxhash-3.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:961d948b7b1c1b6c08484bbce3d489cdf153e4122c3dfb07c2039621243d8795"}, + {file = "xxhash-3.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:719a378930504ab159f7b8e20fa2aa1896cde050011af838af7e7e3518dd82de"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74fb5cb9406ccd7c4dd917f16630d2e5e8cbbb02fc2fca4e559b2a47a64f4940"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dab508ac39e0ab988039bc7f962c6ad021acd81fd29145962b068df4148c476"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c59f3e46e7daf4c589e8e853d700ef6607afa037bfad32c390175da28127e8c"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cc07256eff0795e0f642df74ad096f8c5d23fe66bc138b83970b50fc7f7f6c5"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9f749999ed80f3955a4af0eb18bb43993f04939350b07b8dd2f44edc98ffee9"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7688d7c02149a90a3d46d55b341ab7ad1b4a3f767be2357e211b4e893efbaaf6"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a8b4977963926f60b0d4f830941c864bed16aa151206c01ad5c531636da5708e"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8106d88da330f6535a58a8195aa463ef5281a9aa23b04af1848ff715c4398fb4"}, + {file = 
"xxhash-3.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4c76a77dbd169450b61c06fd2d5d436189fc8ab7c1571d39265d4822da16df22"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:11f11357c86d83e53719c592021fd524efa9cf024dc7cb1dfb57bbbd0d8713f2"}, + {file = "xxhash-3.4.1-cp38-cp38-win32.whl", hash = "sha256:0c786a6cd74e8765c6809892a0d45886e7c3dc54de4985b4a5eb8b630f3b8e3b"}, + {file = "xxhash-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:aabf37fb8fa27430d50507deeab2ee7b1bcce89910dd10657c38e71fee835594"}, + {file = "xxhash-3.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6127813abc1477f3a83529b6bbcfeddc23162cece76fa69aee8f6a8a97720562"}, + {file = "xxhash-3.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef2e194262f5db16075caea7b3f7f49392242c688412f386d3c7b07c7733a70a"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71be94265b6c6590f0018bbf73759d21a41c6bda20409782d8117e76cd0dfa8b"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10e0a619cdd1c0980e25eb04e30fe96cf8f4324758fa497080af9c21a6de573f"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa122124d2e3bd36581dd78c0efa5f429f5220313479fb1072858188bc2d5ff1"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17032f5a4fea0a074717fe33477cb5ee723a5f428de7563e75af64bfc1b1e10"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca7783b20e3e4f3f52f093538895863f21d18598f9a48211ad757680c3bd006f"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d77d09a1113899fad5f354a1eb4f0a9afcf58cefff51082c8ad643ff890e30cf"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:21287bcdd299fdc3328cc0fbbdeaa46838a1c05391264e51ddb38a3f5b09611f"}, + {file = 
"xxhash-3.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:dfd7a6cc483e20b4ad90224aeb589e64ec0f31e5610ab9957ff4314270b2bf31"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:543c7fcbc02bbb4840ea9915134e14dc3dc15cbd5a30873a7a5bf66039db97ec"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fe0a98d990e433013f41827b62be9ab43e3cf18e08b1483fcc343bda0d691182"}, + {file = "xxhash-3.4.1-cp39-cp39-win32.whl", hash = "sha256:b9097af00ebf429cc7c0e7d2fdf28384e4e2e91008130ccda8d5ae653db71e54"}, + {file = "xxhash-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:d699b921af0dcde50ab18be76c0d832f803034d80470703700cb7df0fbec2832"}, + {file = "xxhash-3.4.1-cp39-cp39-win_arm64.whl", hash = "sha256:2be491723405e15cc099ade1280133ccfbf6322d2ef568494fb7d07d280e7eee"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:431625fad7ab5649368c4849d2b49a83dc711b1f20e1f7f04955aab86cd307bc"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc6dbd5fc3c9886a9e041848508b7fb65fd82f94cc793253990f81617b61fe49"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ff8dbd0ec97aec842476cb8ccc3e17dd288cd6ce3c8ef38bff83d6eb927817"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef73a53fe90558a4096e3256752268a8bdc0322f4692ed928b6cd7ce06ad4fe3"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:450401f42bbd274b519d3d8dcf3c57166913381a3d2664d6609004685039f9d3"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a162840cf4de8a7cd8720ff3b4417fbc10001eefdd2d21541a8226bb5556e3bb"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b736a2a2728ba45017cb67785e03125a79d246462dfa892d023b827007412c52"}, + {file 
= "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d0ae4c2e7698adef58710d6e7a32ff518b66b98854b1c68e70eee504ad061d8"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6322c4291c3ff174dcd104fae41500e75dad12be6f3085d119c2c8a80956c51"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:dd59ed668801c3fae282f8f4edadf6dc7784db6d18139b584b6d9677ddde1b6b"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:92693c487e39523a80474b0394645b393f0ae781d8db3474ccdcead0559ccf45"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4603a0f642a1e8d7f3ba5c4c25509aca6a9c1cc16f85091004a7028607ead663"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fa45e8cbfbadb40a920fe9ca40c34b393e0b067082d94006f7f64e70c7490a6"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:595b252943b3552de491ff51e5bb79660f84f033977f88f6ca1605846637b7c6"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:562d8b8f783c6af969806aaacf95b6c7b776929ae26c0cd941d54644ea7ef51e"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:41ddeae47cf2828335d8d991f2d2b03b0bdc89289dc64349d712ff8ce59d0647"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c44d584afdf3c4dbb3277e32321d1a7b01d6071c1992524b6543025fb8f4206f"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd7bddb3a5b86213cc3f2c61500c16945a1b80ecd572f3078ddbbe68f9dabdfb"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9ecb6c987b62437c2f99c01e97caf8d25660bf541fe79a481d05732e5236719c"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:696b4e18b7023527d5c50ed0626ac0520edac45a50ec7cf3fc265cd08b1f4c03"}, + {file = "xxhash-3.4.1.tar.gz", hash = "sha256:0379d6cf1ff987cd421609a264ce025e74f346e3e145dd106c0cc2e3ec3f99a9"}, +] + [[package]] name = "zope-interface" version = "6.0" @@ -2684,4 +2824,4 @@ dev = ["psycopg2-binary"] [metadata] lock-version = "2.0" python-versions = ">=3.11,<3.12" -content-hash = "1f6f71ae95883cc5c3fa8d3181f5e3113870c58fadcc2d0eca5b3dbee5268115" +content-hash = "c6688194c3e3bcaa27bc9999f04c54077655cdd932bedb03ecee5faf578f23c2" diff --git a/pyproject.toml b/pyproject.toml index 3ebf202ae..d87ae1ba4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,6 +49,8 @@ django-ansible-base = { git = "https://github.com/ansible/django-ansible-base.gi jinja2 = ">=3.1.3,<3.2" django-split-settings = "^1.2.0" pexpect = "^4.9.0" +psycopg = "*" +xxhash = "*" [tool.poetry.group.test.dependencies] pytest = "*" diff --git a/src/aap_eda/api/filters/webhook.py b/src/aap_eda/api/filters/webhook.py new file mode 100644 index 000000000..b4547c7bf --- /dev/null +++ b/src/aap_eda/api/filters/webhook.py @@ -0,0 +1,29 @@ +# Copyright 2024 Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import django_filters + +from aap_eda.core import models + + +class WebhookFilter(django_filters.FilterSet): + name = django_filters.CharFilter( + field_name="name", + lookup_expr="istartswith", + label="Filter by webhook name.", + ) + + class Meta: + model = models.Webhook + fields = ["name"] diff --git a/src/aap_eda/api/serializers/__init__.py b/src/aap_eda/api/serializers/__init__.py index 11a8d21ec..6dc804fbe 100644 --- a/src/aap_eda/api/serializers/__init__.py +++ b/src/aap_eda/api/serializers/__init__.py @@ -71,6 +71,7 @@ UserListSerializer, UserSerializer, ) +from .webhook import WebhookInSerializer, WebhookOutSerializer __all__ = ( # auth @@ -119,4 +120,7 @@ "EventStreamSerializer", "EventStreamCreateSerializer", "EventStreamOutSerializer", + # webhooks + "WebhookInSerializer", + "WebhookOutSerializer", ) diff --git a/src/aap_eda/api/serializers/activation.py b/src/aap_eda/api/serializers/activation.py index 4af561ccc..df3f09588 100644 --- a/src/aap_eda/api/serializers/activation.py +++ b/src/aap_eda/api/serializers/activation.py @@ -34,6 +34,7 @@ substitute_source_args, swap_sources, ) +from aap_eda.api.serializers.webhook import WebhookOutSerializer from aap_eda.core import models, validators from aap_eda.core.enums import ProcessParentType @@ -44,7 +45,7 @@ def _updated_ruleset(validated_data): try: sources_info = [] - for event_stream_id in validated_data["event_streams"]: + for event_stream_id in validated_data.get("event_streams", []): event_stream = models.EventStream.objects.get(id=event_stream_id) if event_stream.rulebook: @@ -66,6 +67,18 @@ def _updated_ruleset(validated_data): ) sources_info.append(source) + if validated_data.get("webhooks", []): + sources_info = [ + { + "name": "webhook_event_stream", + "type": "ansible.eda.pg_listener", + "args": { + "dsn": "{{ EDA_PG_NOTIFY_DSN }}", + "channels": "{{ EDA_WEBHOOK_CHANNELS }}", + }, + } + ] + return swap_sources(validated_data["rulebook_rulesets"], sources_info) except Exception as e: 
logger.error(f"Failed to update rulesets: {e}") @@ -87,6 +100,12 @@ class ActivationSerializer(serializers.ModelSerializer): child=EventStreamOutSerializer(), ) + webhooks = serializers.ListField( + required=False, + allow_null=True, + child=WebhookOutSerializer(), + ) + class Meta: model = models.Activation fields = [ @@ -111,6 +130,7 @@ class Meta: "awx_token_id", "credentials", "event_streams", + "webhooks", ] read_only_fields = [ "id", @@ -136,6 +156,11 @@ class ActivationListSerializer(serializers.ModelSerializer): allow_null=True, child=EventStreamOutSerializer(), ) + webhooks = serializers.ListField( + required=False, + allow_null=True, + child=WebhookOutSerializer(), + ) class Meta: model = models.Activation @@ -161,6 +186,7 @@ class Meta: "awx_token_id", "credentials", "event_streams", + "webhooks", ] read_only_fields = ["id", "created_at", "modified_at"] @@ -176,6 +202,10 @@ def to_representation(self, activation): EventStreamOutSerializer(event_stream).data for event_stream in activation.event_streams.all() ] + webhooks = [ + WebhookOutSerializer(webhook).data + for webhook in activation.webhooks.all() + ] return { "id": activation.id, @@ -199,6 +229,7 @@ def to_representation(self, activation): "awx_token_id": activation.awx_token_id, "credentials": credentials, "event_streams": event_streams, + "webhooks": webhooks, } @@ -219,6 +250,7 @@ class Meta: "awx_token_id", "credentials", "event_streams", + "webhooks", ] rulebook_id = serializers.IntegerField( @@ -250,6 +282,12 @@ class Meta: child=serializers.IntegerField(), validators=[validators.check_if_event_streams_exists], ) + webhooks = serializers.ListField( + required=False, + allow_null=True, + child=serializers.IntegerField(), + validators=[validators.check_if_webhooks_exists], + ) def validate(self, data): user = data["user"] @@ -273,7 +311,9 @@ def create(self, validated_data): validated_data["rulebook_rulesets"] = rulebook.rulesets validated_data["git_hash"] = rulebook.project.git_hash 
validated_data["project_id"] = rulebook.project.id - if validated_data.get("event_streams"): + if validated_data.get("event_streams") or validated_data.get( + "webhooks" + ): validated_data["rulebook_rulesets"] = _updated_ruleset( validated_data ) @@ -354,6 +394,7 @@ class Meta: "awx_token_id", "credentials", "event_streams", + "webhooks", ] read_only_fields = ["id", "created_at", "modified_at", "restarted_at"] @@ -405,6 +446,11 @@ def to_representation(self, activation): for event_stream in activation.event_streams.all() ] + webhooks = [ + WebhookOutSerializer(webhook).data + for webhook in activation.webhooks.all() + ] + return { "id": activation.id, "name": activation.name, @@ -432,6 +478,7 @@ def to_representation(self, activation): "awx_token_id": activation.awx_token_id, "credentials": credentials, "event_streams": event_streams, + "webhooks": webhooks, } diff --git a/src/aap_eda/api/serializers/webhook.py b/src/aap_eda/api/serializers/webhook.py new file mode 100644 index 000000000..426e4f596 --- /dev/null +++ b/src/aap_eda/api/serializers/webhook.py @@ -0,0 +1,89 @@ +# Copyright 2024 Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from rest_framework import serializers + +from aap_eda.core import models, validators + + +class WebhookInSerializer(serializers.ModelSerializer): + user = serializers.HiddenField(default=serializers.CurrentUserDefault()) + hmac_algorithm = serializers.CharField( + default="sha256", + help_text="Hash algorithm to use", + validators=[validators.valid_hash_algorithm], + ) + hmac_format = serializers.CharField( + default="hex", + help_text="Hash format to use, hex or base64", + validators=[validators.valid_hash_format], + ) + auth_type = serializers.CharField( + default="hmac", + help_text="Auth type to use hmac or token", + validators=[validators.valid_webhook_auth_type], + ) + additional_data_headers = serializers.ListField( + required=False, + allow_null=True, + child=serializers.CharField(), + ) + + class Meta: + model = models.Webhook + fields = [ + "name", + "type", + "hmac_algorithm", + "header_key", + "auth_type", + "hmac_signature_prefix", + "hmac_format", + "user", + "secret", + "test_mode", + "additional_data_headers", + ] + + +class WebhookOutSerializer(serializers.ModelSerializer): + user = serializers.SerializerMethodField() + + class Meta: + model = models.Webhook + read_only_fields = [ + "id", + "url", + "type", + "created_at", + "modified_at", + "test_content_type", + "test_content", + "test_error_message", + ] + fields = [ + "name", + "test_mode", + "user", + "hmac_algorithm", + "header_key", + "hmac_signature_prefix", + "hmac_format", + "auth_type", + "additional_data_headers", + *read_only_fields, + ] + + def get_user(self, obj) -> str: + return f"{obj.user.username}" diff --git a/src/aap_eda/api/urls.py b/src/aap_eda/api/urls.py index b98d685c6..75466d693 100644 --- a/src/aap_eda/api/urls.py +++ b/src/aap_eda/api/urls.py @@ -44,6 +44,11 @@ ) router.register("credentials", views.CredentialViewSet) router.register("decision-environments", views.DecisionEnvironmentViewSet) +router.register("webhooks", views.WebhookViewSet) +router.register( + 
"external_webhook", + views.ExternalWebhookViewSet, +) openapi_urls = [ path( diff --git a/src/aap_eda/api/views/__init__.py b/src/aap_eda/api/views/__init__.py index 46e85ed1a..17b0cf4ec 100644 --- a/src/aap_eda/api/views/__init__.py +++ b/src/aap_eda/api/views/__init__.py @@ -17,9 +17,11 @@ from .credential import CredentialViewSet from .decision_environment import DecisionEnvironmentViewSet from .event_stream import EventStreamViewSet +from .external_webhook import ExternalWebhookViewSet from .project import ExtraVarViewSet, ProjectViewSet from .rulebook import AuditRuleViewSet, RulebookViewSet from .user import CurrentUserAwxTokenViewSet, CurrentUserView, UserViewSet +from .webhook import WebhookViewSet __all__ = ( # auth @@ -44,4 +46,8 @@ "DecisionEnvironmentViewSet", # event_stream "EventStreamViewSet", + # webhook + "WebhookViewSet", + # External webhook + "ExternalWebhookViewSet", ) diff --git a/src/aap_eda/api/views/activation.py b/src/aap_eda/api/views/activation.py index f66a84adb..598c34e54 100644 --- a/src/aap_eda/api/views/activation.py +++ b/src/aap_eda/api/views/activation.py @@ -13,6 +13,8 @@ # limitations under the License. 
import logging +import yaml +from django.conf import settings from django.shortcuts import get_object_or_404 from django_filters import rest_framework as defaultfilters from drf_spectacular.utils import ( @@ -84,6 +86,27 @@ def create(self, request): serializer.is_valid(raise_exception=True) activation = serializer.create(serializer.validated_data) + if activation.webhooks: + extra_var = {} + if activation.extra_var_id: + extra_var_obj = models.ExtraVar.objects.get( + pk=activation.extra_var_id + ) + extra_var = yaml.safe_load(extra_var_obj.extra_var) + + extra_var["EDA_WEBHOOK_CHANNELS"] = [ + webhook.channel_name for webhook in activation.webhooks.all() + ] + extra_var["EDA_PG_NOTIFY_DSN"] = settings.PG_NOTIFY_DSN + if activation.extra_var_id and extra_var_obj: + extra_var_obj.extra_var = yaml.dump(extra_var) + extra_var_obj.save() + else: + extra_var_obj = models.ExtraVar.objects.create( + extra_var=yaml.dump(extra_var) + ) + activation.extra_var_id = extra_var_obj.id + activation.save(update_fields=["extra_var_id"]) if activation.is_enabled: start_rulebook_process( diff --git a/src/aap_eda/api/views/auth.py b/src/aap_eda/api/views/auth.py index 894a127b8..3acf8c744 100644 --- a/src/aap_eda/api/views/auth.py +++ b/src/aap_eda/api/views/auth.py @@ -138,7 +138,6 @@ def get_serializer_class(self): def retrieve(self, _request, pk=None): # TODO: Optimization by querying to retrieve desired permission format role = get_object_or_404(self.queryset, pk=pk) - detail_serialzer = self.get_serializer_class() role = detail_serialzer(role).data result = display_permissions(role) diff --git a/src/aap_eda/api/views/external_webhook.py b/src/aap_eda/api/views/external_webhook.py new file mode 100644 index 000000000..00913ab98 --- /dev/null +++ b/src/aap_eda/api/views/external_webhook.py @@ -0,0 +1,169 @@ +# Copyright 2024 Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
import base64
import hmac
import json
import logging
import urllib.parse

import yaml
from django.conf import settings
from django.http.request import HttpHeaders
from drf_spectacular.utils import extend_schema
from rest_framework import status, viewsets
from rest_framework.decorators import action
from rest_framework.exceptions import AuthenticationFailed, ParseError
from rest_framework.permissions import AllowAny
from rest_framework.response import Response

from aap_eda.core.enums import Action, ResourceType
from aap_eda.core.models import Webhook
from aap_eda.services.pg_notify import PGNotify

logger = logging.getLogger(__name__)


class ExternalWebhookViewSet(viewsets.GenericViewSet):
    """Receive events from external systems on a per-webhook URL.

    The endpoint is open at the DRF level (AllowAny, no authentication
    classes); each request is instead verified against the target
    Webhook's own secret, via either an HMAC signature over the raw body
    or a shared token, carried in the webhook's configured header.
    """

    queryset = Webhook.objects.all()
    rbac_action = None
    rbac_resource_type = ResourceType.WEBHOOK
    permission_classes = [AllowAny]
    authentication_classes = []

    def get_rbac_permission(self):
        # Only the POST action exists on this viewset.
        if self.action == "post":
            return ResourceType.WEBHOOK, Action.READ

    def _update_test_data(
        self,
        error_message: str = "",
        content_type: str = "",
        content: str = "",
    ):
        """Persist the received payload/error on the webhook (test mode)."""
        logger.warning(
            f"The webhook: {self.webhook.name} is currently in test mode"
        )
        self.webhook.test_error_message = error_message
        self.webhook.test_content_type = content_type
        self.webhook.test_content = content
        self.webhook.save(
            update_fields=[
                "test_content_type",
                "test_content",
                "test_error_message",
            ]
        )

    def _parse_body(self, content_type: str, body: bytes) -> dict:
        """Decode the request body according to its Content-Type.

        Raises ParseError (HTTP 400) on malformed payloads; the original
        code let a bad JSON body escape as an unhandled exception (500).
        """
        if content_type == "application/json":
            try:
                data = json.loads(body.decode())
            except json.JSONDecodeError as exc:
                message = f"Invalid JSON payload: {exc}"
                logger.error(message)
                raise ParseError(message)
        elif content_type == "application/x-www-form-urlencoded":
            data = urllib.parse.parse_qs(body.decode())
        else:
            # Unknown content types: fall back to YAML (a JSON superset).
            try:
                data = yaml.safe_load(body.decode())
            except Exception:
                message = f"Invalid content type passed {content_type}"
                logger.error(message)
                raise ParseError(message)
        return data

    def _embellish_data(self, headers: HttpHeaders, data: dict):
        """Add the webhook name and any configured headers to the event."""
        data["eda_webhook_name"] = self.webhook.name
        if not self.webhook.additional_data_headers:
            return
        for key in self.webhook.additional_data_headers:
            value = headers.get(key)
            if value:
                data[key] = value

    def _hmac_auth(self, body: bytes, signature: str):
        """Verify the HMAC signature computed over the raw request body."""
        hash_object = hmac.new(
            self.webhook.secret.get_secret_value().encode("utf-8"),
            msg=body,
            digestmod=self.webhook.hmac_algorithm,
        )
        if self.webhook.hmac_format == "hex":
            expected_signature = (
                self.webhook.hmac_signature_prefix + hash_object.hexdigest()
            )
        elif self.webhook.hmac_format == "base64":
            expected_signature = (
                self.webhook.hmac_signature_prefix
                + base64.b64encode(hash_object.digest()).decode()
            )
        else:
            # Defensive: the serializer restricts hmac_format to hex/base64,
            # but the original code raised NameError (500) on anything else.
            message = f"Unsupported hmac format {self.webhook.hmac_format}"
            logger.error(message)
            raise ParseError(message)

        if not hmac.compare_digest(expected_signature, signature):
            message = "Signature mismatch, check your payload and secret"
            logger.warning(message)
            if self.webhook.test_mode:
                self._update_test_data(error_message=message)
            raise AuthenticationFailed(message)

    def _token_auth(self, token: str):
        """Verify a shared-token header, optionally 'Bearer '-prefixed."""
        # The original split("Bearer ")[1] raised IndexError when the header
        # was exactly "Bearer" or lacked the space after the prefix.
        if token.startswith("Bearer "):
            token = token[len("Bearer ") :]
        # compare_digest: constant-time comparison avoids a timing oracle.
        if not hmac.compare_digest(
            self.webhook.secret.get_secret_value(), token
        ):
            message = "Token mismatch, check your token"
            logger.warning(message)
            if self.webhook.test_mode:
                self._update_test_data(error_message=message)
            raise AuthenticationFailed(message)

    @extend_schema(exclude=True)
    @action(detail=True, methods=["POST"], rbac_action=None)
    def post(self, request, *args, **kwargs):
        """Authenticate, parse and forward an external event.

        In test mode the payload is stored on the webhook; otherwise it
        is forwarded to the webhook's PG NOTIFY channel.
        """
        try:
            self.webhook = Webhook.objects.get(uuid=kwargs["pk"])
        except Webhook.DoesNotExist:
            raise ParseError("bad uuid specified")

        logger.debug(f"Headers {request.headers}")
        logger.debug(f"Body {request.body}")
        if self.webhook.header_key not in request.headers:
            message = f"{self.webhook.header_key} header is missing"
            logger.error(message)
            if self.webhook.test_mode:
                self._update_test_data(error_message=message)
            raise ParseError(message)

        if self.webhook.auth_type == "hmac":
            self._hmac_auth(
                request.body, request.headers[self.webhook.header_key]
            )
        elif self.webhook.auth_type == "token":
            self._token_auth(request.headers[self.webhook.header_key])

        data = self._parse_body(
            request.headers.get("Content-Type", ""), request.body
        )
        logger.debug(f"Data: {data}")
        self._embellish_data(request.headers, data)
        if self.webhook.test_mode:
            self._update_test_data(
                content=yaml.dump(data),
                content_type=request.headers.get("Content-Type", "unknown"),
            )
        else:
            try:
                PGNotify(
                    settings.PG_NOTIFY_DSN_SERVER,
                    self.webhook.channel_name,
                    data,
                )()
            except Exception:
                logger.exception("Failed to forward event to PG channel")
                return Response(status=status.HTTP_500_INTERNAL_SERVER_ERROR)

        return Response(status=status.HTTP_200_OK)
import logging

from django.conf import settings
from django.shortcuts import get_object_or_404
from django_filters import rest_framework as defaultfilters
from drf_spectacular.utils import OpenApiResponse, extend_schema
from rest_framework import mixins, status, viewsets
from rest_framework.response import Response

from aap_eda.api import serializers
from aap_eda.api.filters.webhook import WebhookFilter
from aap_eda.core import models
from aap_eda.core.enums import ResourceType

logger = logging.getLogger(__name__)


class WebhookViewSet(
    mixins.CreateModelMixin,
    viewsets.ReadOnlyModelViewSet,
    mixins.DestroyModelMixin,
):
    """CRUD (minus full update) management API for Webhook objects."""

    queryset = models.Webhook.objects.order_by("-created_at")
    filter_backends = (defaultfilters.DjangoFilterBackend,)
    filterset_class = WebhookFilter
    rbac_resource_type = ResourceType.WEBHOOK

    def get_serializer_class(self):
        # Write actions validate with the input serializer; every other
        # action (list/retrieve/destroy/default) uses the read-only shape.
        if self.action in ("create", "partial_update"):
            return serializers.WebhookInSerializer
        return serializers.WebhookOutSerializer

    def get_response_serializer_class(self):
        return serializers.WebhookOutSerializer

    @extend_schema(
        description="Get the Webhook by its id",
        responses={
            status.HTTP_200_OK: OpenApiResponse(
                serializers.WebhookOutSerializer,
                description="Return the webhook by its id.",
            ),
        },
    )
    def retrieve(self, request, pk: int):
        webhook = get_object_or_404(models.Webhook, pk=pk)
        return Response(serializers.WebhookOutSerializer(webhook).data)

    @extend_schema(
        description="Delete a Webhook by its id",
        responses={
            status.HTTP_204_NO_CONTENT: OpenApiResponse(
                None, description="Delete successful."
            )
        },
    )
    def destroy(self, request, *args, **kwargs):
        webhook = self.get_object()
        self.perform_destroy(webhook)
        return Response(status=status.HTTP_204_NO_CONTENT)

    @extend_schema(
        description="List all webhooks",
        responses={
            status.HTTP_200_OK: OpenApiResponse(
                serializers.WebhookOutSerializer(many=True),
                description="Return a list of webhook.",
            ),
        },
    )
    def list(self, request):
        webhooks = self.filter_queryset(models.Webhook.objects.all())

        serializer = serializers.WebhookOutSerializer(webhooks, many=True)
        result = self.paginate_queryset(serializer.data)

        return self.get_paginated_response(result)

    @extend_schema(
        request=serializers.WebhookInSerializer,
        responses={
            status.HTTP_201_CREATED: OpenApiResponse(
                serializers.WebhookOutSerializer,
                description="Return the new webhook.",
            ),
            status.HTTP_400_BAD_REQUEST: OpenApiResponse(
                description="Invalid data to create webhook."
            ),
        },
    )
    def create(self, request):
        context = {"request": request}
        serializer = serializers.WebhookInSerializer(
            data=request.data,
            context=context,
        )
        serializer.is_valid(raise_exception=True)
        response = serializer.create(serializer.validated_data)

        # The externally reachable URL embeds the webhook's UUID and is
        # only known after the row (and its UUID) exists.
        response.url = f"{settings.WEBHOOK_URL_PREFIX}/{response.uuid}/post/"
        response.save(update_fields=["url"])

        return Response(
            serializers.WebhookOutSerializer(response).data,
            status=status.HTTP_201_CREATED,
        )

    @extend_schema(
        request=serializers.WebhookInSerializer,
        responses={
            status.HTTP_200_OK: OpenApiResponse(
                serializers.WebhookOutSerializer,
                description="Update successful, return the new webhook.",
            ),
            status.HTTP_400_BAD_REQUEST: OpenApiResponse(
                description="Unable to update webhook."
            ),
        },
    )
    def partial_update(self, request, *args, **kwargs):
        """Partially update a webhook after validating the payload."""
        webhook = get_object_or_404(models.Webhook, pk=kwargs["pk"])
        context = {"request": request}
        serializer = serializers.WebhookInSerializer(
            webhook,
            data=request.data,
            context=context,
            partial=True,
        )
        serializer.is_valid(raise_exception=True)

        # Toggling test_mode invalidates previously captured test data.
        if webhook.test_mode != request.data.get(
            "test_mode", webhook.test_mode
        ):
            webhook.test_content_type = ""
            webhook.test_content = ""
            webhook.test_error_message = ""

        # secret is write-only/encrypted; only touch it when explicitly sent.
        if "secret" in request.data:
            webhook.secret = request.data.get("secret")

        # Fix: the original hand-copied assignments silently dropped
        # auth_type and additional_data_headers even though the input
        # serializer accepts and validates them.
        for field in (
            "name",
            "test_mode",
            "auth_type",
            "header_key",
            "hmac_algorithm",
            "hmac_format",
            "hmac_signature_prefix",
            "additional_data_headers",
        ):
            if field in request.data:
                setattr(webhook, field, request.data[field])

        webhook.save()

        return Response(
            serializers.WebhookOutSerializer(webhook).data,
            status=status.HTTP_200_OK,
        )
# Generated by Django 4.2.7 on 2024-02-23 20:09
#
# Adds the Webhook model, registers "webhook" as a Permission resource
# type, links activations to webhooks (M2M) and seeds the default
# webhook permissions.

import uuid

import django.contrib.postgres.fields
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models

import aap_eda.core.utils.crypto.fields

# Permission rows seeded by the RunPython step (and removed on rollback).
PERMISSIONS = {
    "webhook": ["create", "read", "update", "delete"],
}


def insert_permissions(apps, schema_editor):
    # Forward data migration: one Permission row per
    # (resource_type, action) pair declared in PERMISSIONS.
    permission_model = apps.get_model("core", "Permission")
    db_alias = schema_editor.connection.alias
    permissions = []
    for resource_type, actions in PERMISSIONS.items():
        for action in actions:
            permissions.append(
                permission_model(resource_type=resource_type, action=action)
            )
    permission_model.objects.using(db_alias).bulk_create(permissions)


def drop_permissions(apps, schema_editor):
    # Reverse data migration: delete the rows added by insert_permissions.
    permission_model = apps.get_model("core", "Permission")  # noqa: N806
    db_alias = schema_editor.connection.alias
    for resource_type, actions in PERMISSIONS.items():
        permission_model.objects.using(db_alias).filter(
            resource_type=resource_type, action__in=actions
        ).delete()


class Migration(migrations.Migration):
    dependencies = [
        ("core", "0025_rename_args_eventstream_source_args"),
    ]

    operations = [
        migrations.AlterField(
            model_name="permission",
            name="resource_type",
            field=models.TextField(
                choices=[
                    ("activation", "activation"),
                    ("activation_instance", "activation_instance"),
                    ("audit_rule", "audit_rule"),
                    ("audit_event", "audit_event"),
                    ("user", "user"),
                    ("project", "project"),
                    ("extra_var", "extra_var"),
                    ("rulebook", "rulebook"),
                    ("role", "role"),
                    ("decision_environment", "decision_environment"),
                    ("credential", "credential"),
                    ("event_stream", "event_stream"),
                    ("webhook", "webhook"),
                ]
            ),
        ),
        migrations.CreateModel(
            name="Webhook",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "name",
                    models.TextField(
                        help_text="The name of the webhook", unique=True
                    ),
                ),
                (
                    "type",
                    models.TextField(
                        default="generic", help_text="The type of the webhook"
                    ),
                ),
                (
                    "secret",
                    aap_eda.core.utils.crypto.fields.EncryptedTextField(
                        help_text="The secret for the webhook"
                    ),
                ),
                (
                    "auth_type",
                    models.TextField(
                        default="hmac",
                        help_text="The Authentication method to use, hmac or token",
                    ),
                ),
                (
                    "header_key",
                    models.TextField(
                        default="X-Hub-Signature-256",
                        help_text="The HTTP header which will contain the HMAC Signature or Token",
                    ),
                ),
                (
                    "hmac_algorithm",
                    models.TextField(
                        default="sha256",
                        help_text="The algorithm to use for HMAC verification",
                    ),
                ),
                (
                    "hmac_format",
                    models.TextField(
                        default="hex",
                        help_text="The format of the signature can be base64 or hex",
                    ),
                ),
                (
                    "hmac_signature_prefix",
                    models.TextField(
                        default="", help_text="The prefix in the signature"
                    ),
                ),
                (
                    "additional_data_headers",
                    django.contrib.postgres.fields.ArrayField(
                        base_field=models.TextField(
                            help_text="The HTTP header"
                        ),
                        blank=True,
                        help_text="The additional http headers which will be added to the event data",
                        null=True,
                        size=None,
                    ),
                ),
                (
                    "test_mode",
                    models.BooleanField(
                        default=False, help_text="Enable test mode"
                    ),
                ),
                (
                    "test_content_type",
                    models.TextField(
                        default="",
                        help_text="The content type of test data, when in test mode",
                        null=True,
                    ),
                ),
                (
                    "test_content",
                    models.TextField(
                        default="",
                        help_text="The content recieved, when in test mode, stored as a yaml string",
                        null=True,
                    ),
                ),
                (
                    "test_error_message",
                    models.TextField(
                        default="",
                        help_text="The error message, when in test mode",
                        null=True,
                    ),
                ),
                ("uuid", models.UUIDField(default=uuid.uuid4)),
                (
                    "url",
                    models.TextField(
                        help_text="The URL which will be used to post the data to the webhook"
                    ),
                ),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("modified_at", models.DateTimeField(auto_now=True)),
                (
                    "user",
                    models.ForeignKey(
                        help_text="The user who created the webhook",
                        on_delete=django.db.models.deletion.CASCADE,
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "db_table": "core_webhook",
            },
        ),
        migrations.AddField(
            model_name="activation",
            name="webhooks",
            field=models.ManyToManyField(default=None, to="core.webhook"),
        ),
        migrations.AddIndex(
            model_name="webhook",
            index=models.Index(fields=["id"], name="ix_webhook_id"),
        ),
        migrations.AddIndex(
            model_name="webhook",
            index=models.Index(fields=["name"], name="ix_webhook_name"),
        ),
        migrations.AddIndex(
            model_name="webhook",
            index=models.Index(fields=["uuid"], name="ix_webhook_uuid"),
        ),
        # Data migration runs last so the schema changes above exist first.
        migrations.RunPython(insert_permissions, drop_permissions),
    ]
import uuid

from django.contrib.postgres.fields import ArrayField
from django.db import models

from aap_eda.core.utils.crypto.fields import EncryptedTextField

# Fix: __all__ must be a sequence of names. The original bare string
# ("Webhook") would make `from ... import *` iterate it character by
# character and fail with ImportError.
__all__ = ("Webhook",)

EDA_WEBHOOK_CHANNEL_PREFIX = "eda_webhook_"


class Webhook(models.Model):
    """An inbound webhook endpoint definition.

    Stores the shared secret and HMAC/token verification settings used by
    the external webhook view, plus optional captured payloads when the
    webhook is in test mode. Events received on a webhook are forwarded
    to a PG NOTIFY channel derived from its UUID (see channel_name).
    """

    class Meta:
        db_table = "core_webhook"
        indexes = [
            models.Index(fields=["id"], name="ix_webhook_id"),
            models.Index(fields=["name"], name="ix_webhook_name"),
            models.Index(fields=["uuid"], name="ix_webhook_uuid"),
        ]

    name = models.TextField(
        null=False, unique=True, help_text="The name of the webhook"
    )
    type = models.TextField(
        null=False, help_text="The type of the webhook", default="generic"
    )
    # Encrypted at rest; used for HMAC signing or token comparison.
    secret = EncryptedTextField(
        null=False, help_text="The secret for the webhook"
    )
    auth_type = models.TextField(
        null=False,
        default="hmac",
        help_text="The Authentication method to use, hmac or token",
    )
    header_key = models.TextField(
        null=False,
        default="X-Hub-Signature-256",
        help_text=(
            "The HTTP header which will contain the HMAC " "Signature or Token"
        ),
    )
    hmac_algorithm = models.TextField(
        null=False,
        default="sha256",
        help_text="The algorithm to use for HMAC verification",
    )
    hmac_format = models.TextField(
        null=False,
        default="hex",
        help_text="The format of the signature can be base64 or hex",
    )
    hmac_signature_prefix = models.TextField(
        null=False, default="", help_text="The prefix in the signature"
    )

    additional_data_headers = ArrayField(
        models.TextField(help_text="The HTTP header"),
        null=True,
        blank=True,
        help_text=(
            "The additional http headers which will "
            "be added to the event data"
        ),
    )

    test_mode = models.BooleanField(
        default=False, help_text="Enable test mode"
    )

    test_content_type = models.TextField(
        null=True,
        default="",
        help_text="The content type of test data, when in test mode",
    )
    # TODO: "recieved" typo kept byte-identical to migration 0026; fixing
    # it requires a coordinated schema migration.
    test_content = models.TextField(
        null=True,
        default="",
        help_text=(
            "The content recieved, when in test mode, "
            "stored as a yaml string"
        ),
    )
    test_error_message = models.TextField(
        null=True,
        default="",
        help_text="The error message, when in test mode",
    )

    user = models.ForeignKey(
        "User",
        on_delete=models.CASCADE,
        null=False,
        help_text="The user who created the webhook",
    )
    uuid = models.UUIDField(default=uuid.uuid4)
    url = models.TextField(
        null=False,
        help_text="The URL which will be used to post the data to the webhook",
    )
    created_at = models.DateTimeField(auto_now_add=True, null=False)
    modified_at = models.DateTimeField(auto_now=True, null=False)

    @property
    def channel_name(self) -> str:
        """PG NOTIFY channel name derived from the webhook's UUID.

        Hyphens are replaced with underscores so the value is a valid
        Postgres identifier.
        """
        return (
            f"{EDA_WEBHOOK_CHANNEL_PREFIX}"
            f"{str(self.uuid).replace('-', '_')}"
        )
import hashlib

# Accepted encodings for an HMAC signature value.
EDA_VALID_HASH_FORMATS = ("hex", "base64")

# Accepted webhook authentication schemes.
EDA_VALID_WEBHOOK_AUTH_TYPES = ("hmac", "token")


def valid_hash_algorithm(algo: str):
    """Validate that algo is a hash algorithm supported by hashlib.

    Returns the value unchanged (fix: the original returned None,
    inconsistent with the sibling validators below).
    """
    if algo not in hashlib.algorithms_available:
        raise serializers.ValidationError(
            (
                f"Invalid hash algorithm {algo} should "
                f"be one of {hashlib.algorithms_available}"
            )
        )
    return algo


def valid_hash_format(fmt: str):
    """Validate that fmt is one of EDA_VALID_HASH_FORMATS; return it."""
    if fmt not in EDA_VALID_HASH_FORMATS:
        raise serializers.ValidationError(
            (
                f"Invalid hash format {fmt} should "
                f"be one of {EDA_VALID_HASH_FORMATS}"
            )
        )
    return fmt


def valid_webhook_auth_type(auth_type: str):
    """Validate that auth_type is a supported webhook auth scheme."""
    if auth_type not in EDA_VALID_WEBHOOK_AUTH_TYPES:
        # Fix: original message read "Invalid auth_type<value>" with no
        # space between the label and the offending value.
        raise serializers.ValidationError(
            (
                f"Invalid auth_type {auth_type} should "
                f"be one of {EDA_VALID_WEBHOOK_AUTH_TYPES}"
            )
        )
    return auth_type


def check_if_webhooks_exists(webhook_ids: list[int]) -> list[int]:
    """Validate that every referenced Webhook pk exists."""
    for webhook_id in webhook_ids:
        try:
            models.Webhook.objects.get(pk=webhook_id)
        except models.Webhook.DoesNotExist:
            raise serializers.ValidationError(
                f"Webhook with id {webhook_id} does not exist"
            )
    return webhook_ids
import json
import logging
import uuid

import psycopg
import xxhash
from django.conf import settings

logger = logging.getLogger(__name__)

MAX_MESSAGE_LENGTH = settings.MAX_PG_NOTIFY_MESSAGE_SIZE
MESSAGE_CHUNKED_UUID = "_message_chunked_uuid"
MESSAGE_CHUNK_COUNT = "_message_chunk_count"
MESSAGE_CHUNK_SEQUENCE = "_message_chunk_sequence"
MESSAGE_CHUNK = "_chunk"
MESSAGE_LENGTH = "_message_length"
MESSAGE_XX_HASH = "_message_xx_hash"


class PGNotify:
    """The PGNotify action sends an event to a PG Pub Sub Channel.

    Needs
        dsn      https://www.postgresql.org/docs/current/libpq-connect.html
                 #LIBPQ-CONNSTRING-KEYWORD-VALUE
        channel  the channel name to send the notifies
        data     the event payload (JSON-serializable dict)

    Payloads longer than MAX_PG_NOTIFY_MESSAGE_SIZE are split into
    sequenced chunks carrying a shared UUID and an xxhash checksum so the
    receiver can reassemble and verify them.
    """

    def __init__(self, dsn: str, channel: str, data: dict):
        self.dsn = dsn
        self.channel = channel
        self.data = data

    def __call__(self):
        try:
            with psycopg.connect(
                conninfo=self.dsn,
                autocommit=True,
            ) as conn:
                with conn.cursor() as cursor:
                    payload = json.dumps(self.data)
                    message_length = len(payload)
                    logger.debug(f"Message length {message_length}")
                    if message_length >= MAX_MESSAGE_LENGTH:
                        self._notify_chunked(
                            cursor, payload, message_length
                        )
                    else:
                        self._notify(cursor, payload)
        except psycopg.OperationalError as e:
            logger.error(f"PG Notify operational error {e}")
            raise e

    def _notify(self, cursor, payload: str) -> None:
        """Emit one notification with a parameterized pg_notify() call.

        Fix: the original interpolated the payload into a NOTIFY
        statement via an f-string; any single quote in the JSON broke the
        SQL and the statement was injectable. pg_notify() accepts bound
        parameters, NOTIFY does not.
        """
        cursor.execute(
            "SELECT pg_notify(%s, %s);", (self.channel, payload)
        )

    def _notify_chunked(
        self, cursor, payload: str, message_length: int
    ) -> None:
        """Split an oversized payload into sequenced, checksummed chunks."""
        xx_hash = xxhash.xxh32(payload.encode("utf-8")).hexdigest()
        logger.debug("Message length exceeds, will chunk")
        message_uuid = str(uuid.uuid4())
        number_of_chunks = int(message_length / MAX_MESSAGE_LENGTH) + 1
        chunked = {
            MESSAGE_CHUNKED_UUID: message_uuid,
            MESSAGE_CHUNK_COUNT: number_of_chunks,
            MESSAGE_LENGTH: message_length,
            MESSAGE_XX_HASH: xx_hash,
        }
        logger.debug(f"Chunk info {message_uuid}")
        logger.debug(f"Number of chunks {number_of_chunks}")
        logger.debug(f"Total data size {message_length}")
        logger.debug(f"XX Hash {xx_hash}")

        # NOTE(review): chunk size is computed from the raw payload, so a
        # chunk message plus its envelope may exceed MAX_MESSAGE_LENGTH;
        # confirm the receiver/Postgres limit tolerates the overhead.
        for sequence, start in enumerate(
            range(0, message_length, MAX_MESSAGE_LENGTH), start=1
        ):
            chunked[MESSAGE_CHUNK] = payload[
                start : start + MAX_MESSAGE_LENGTH
            ]
            chunked[MESSAGE_CHUNK_SEQUENCE] = sequence
            chunk_payload = json.dumps(chunked)
            logger.debug(f"Chunked Length {len(chunk_payload)}")
            self._notify(cursor, chunk_payload)
Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import uuid + +import pytest +import yaml +from rest_framework import status +from rest_framework.test import APIClient + +from aap_eda.core import models +from aap_eda.core.enums import ( + ACTIVATION_STATUS_MESSAGE_MAP, + ActivationStatus, + RestartPolicy, +) +from tests.integration.constants import api_url_v1 + +TEST_ACTIVATION = { + "name": "test-activation", + "description": "test activation", + "is_enabled": True, + "decision_environment_id": 1, + "project_id": 1, + "rulebook_id": 1, + "extra_var_id": 1, + "restart_policy": RestartPolicy.ON_FAILURE, + "restart_count": 0, + "status_message": "", +} + +TEST_AWX_TOKEN = { + "name": "test-awx-token", + "description": "test AWX token", + "token": "abc123xyx", +} + +PROJECT_GIT_HASH = "684f62df18ce5f8d5c428e53203b9b975426eed0" + +TEST_PROJECT = { + "git_hash": PROJECT_GIT_HASH, + "name": "test-project-01", + "url": "https://git.example.com/acme/project-01", + "description": "test project", +} + +TEST_RULEBOOK = { + "name": "test-rulebook.yml", + "description": "test rulebook", +} + +TEST_DECISION_ENV = { + "name": "test-de", + "description": "test de", + "image_url": "quay.io/ansible/ansible-rulebook", +} + +TEST_RULESETS = """ +--- +- name: hello + hosts: localhost + gather_facts: false + sources: + - name: demo + ansible.eda.range: + limit: 10 + rules: + - name: rule1 + condition: true + action: + debug: +""" + +TEST_EXTRA_VAR = """ +--- +collections: + - 
community.general + - benthomasson.eda +""" + + +def create_activation_related_data(with_project=True): + user = models.User.objects.create_user( + username="luke.skywalker", + first_name="Luke", + last_name="Skywalker", + email="luke.skywalker@example.com", + password="secret", + ) + user_id = user.pk + models.AwxToken.objects.create( + name=TEST_AWX_TOKEN["name"], + token=TEST_AWX_TOKEN["token"], + user=user, + ) + credential_id = models.Credential.objects.create( + name="test-credential", + description="test credential", + credential_type="Container Registry", + username="dummy-user", + secret="dummy-password", + ).pk + decision_environment_id = models.DecisionEnvironment.objects.create( + name=TEST_DECISION_ENV["name"], + image_url=TEST_DECISION_ENV["image_url"], + description=TEST_DECISION_ENV["description"], + credential_id=credential_id, + ).pk + project_id = ( + models.Project.objects.create( + git_hash=TEST_PROJECT["git_hash"], + name=TEST_PROJECT["name"], + url=TEST_PROJECT["url"], + description=TEST_PROJECT["description"], + ).pk + if with_project + else None + ) + rulebook_id = ( + models.Rulebook.objects.create( + name=TEST_RULEBOOK["name"], + rulesets=TEST_RULESETS, + description=TEST_RULEBOOK["description"], + project_id=project_id, + ).pk + if with_project + else None + ) + extra_var_id = models.ExtraVar.objects.create(extra_var=TEST_EXTRA_VAR).pk + + webhook1 = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook1", + user=user, + ) + webhook2 = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook2", + user=user, + ) + return { + "user_id": user_id, + "decision_environment_id": decision_environment_id, + "project_id": project_id, + "rulebook_id": rulebook_id, + "extra_var_id": extra_var_id, + "credential_id": credential_id, + "webhooks": [webhook1, webhook2], + } + + +def create_activation(fks: dict): + activation_data = TEST_ACTIVATION.copy() + activation_data["decision_environment_id"] = 
fks["decision_environment_id"] + activation_data["project_id"] = fks["project_id"] + activation_data["rulebook_id"] = fks["rulebook_id"] + activation_data["extra_var_id"] = fks["extra_var_id"] + activation_data["user_id"] = fks["user_id"] + activation = models.Activation(**activation_data) + activation.save() + for webhook in fks["webhooks"]: + activation.webhooks.add(webhook) + + return activation + + +@pytest.mark.django_db +def test_create_activation_with_webhooks(client: APIClient): + fks = create_activation_related_data() + test_activation = TEST_ACTIVATION.copy() + test_activation["decision_environment_id"] = fks["decision_environment_id"] + test_activation["rulebook_id"] = fks["rulebook_id"] + test_activation["extra_var_id"] = fks["extra_var_id"] + test_activation["webhooks"] = [webhook.id for webhook in fks["webhooks"]] + + client.post(f"{api_url_v1}/users/me/awx-tokens/", data=TEST_AWX_TOKEN) + response = client.post(f"{api_url_v1}/activations/", data=test_activation) + assert response.status_code == status.HTTP_201_CREATED + data = response.data + activation = models.Activation.objects.filter(id=data["id"]).first() + if activation.project: + assert data["project"] == {"id": activation.project.id, **TEST_PROJECT} + else: + assert not data["project"] + if activation.rulebook: + assert data["rulebook"] == { + "id": activation.rulebook.id, + **TEST_RULEBOOK, + } + else: + assert not data["rulebook"] + assert data["decision_environment"] == { + "id": activation.decision_environment.id, + **TEST_DECISION_ENV, + } + assert data["extra_var"] == { + "id": activation.extra_var.id, + } + assert activation.rulebook_name == TEST_RULEBOOK["name"] + swapped_ruleset = yaml.safe_load(activation.rulebook_rulesets) + assert list(swapped_ruleset[0]["sources"][0].keys()) == [ + "ansible.eda.pg_listener", + "name", + ] + assert data["restarted_at"] is None + assert activation.status == ActivationStatus.PENDING + assert ( + activation.status_message + == 
ACTIVATION_STATUS_MESSAGE_MAP[activation.status] + ) + assert data["webhooks"][0]["name"] == "test-webhook1" + assert data["webhooks"][1]["name"] == "test-webhook2" + + +@pytest.mark.django_db +def test_create_activation_with_bad_webhook(client: APIClient): + fks = create_activation_related_data() + test_activation = TEST_ACTIVATION.copy() + test_activation["decision_environment_id"] = fks["decision_environment_id"] + test_activation["rulebook_id"] = fks["rulebook_id"] + test_activation["extra_var_id"] = fks["extra_var_id"] + test_activation["webhooks"] = [1492] + + client.post(f"{api_url_v1}/users/me/awx-tokens/", data=TEST_AWX_TOKEN) + response = client.post(f"{api_url_v1}/activations/", data=test_activation) + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert ( + str(response.data["webhooks"][0]) + == "Webhook with id 1492 does not exist" + ) + + +@pytest.mark.django_db +def test_create_activation_with_webhooks_no_extravar(client: APIClient): + fks = create_activation_related_data() + test_activation = TEST_ACTIVATION.copy() + test_activation["decision_environment_id"] = fks["decision_environment_id"] + test_activation["rulebook_id"] = fks["rulebook_id"] + test_activation["webhooks"] = [webhook.id for webhook in fks["webhooks"]] + test_activation.pop("extra_var_id") + client.post(f"{api_url_v1}/users/me/awx-tokens/", data=TEST_AWX_TOKEN) + response = client.post(f"{api_url_v1}/activations/", data=test_activation) + assert response.status_code == status.HTTP_201_CREATED + data = response.data + activation = models.Activation.objects.filter(id=data["id"]).first() + if activation.project: + assert data["project"] == {"id": activation.project.id, **TEST_PROJECT} + else: + assert not data["project"] + if activation.rulebook: + assert data["rulebook"] == { + "id": activation.rulebook.id, + **TEST_RULEBOOK, + } + else: + assert not data["rulebook"] + assert data["decision_environment"] == { + "id": activation.decision_environment.id, + 
**TEST_DECISION_ENV, + } + assert activation.rulebook_name == TEST_RULEBOOK["name"] + swapped_ruleset = yaml.safe_load(activation.rulebook_rulesets) + assert list(swapped_ruleset[0]["sources"][0].keys()) == [ + "ansible.eda.pg_listener", + "name", + ] + assert data["restarted_at"] is None + assert activation.status == ActivationStatus.PENDING + assert ( + activation.status_message + == ACTIVATION_STATUS_MESSAGE_MAP[activation.status] + ) + assert data["webhooks"][0]["name"] == "test-webhook1" + assert data["webhooks"][1]["name"] == "test-webhook2" diff --git a/tests/integration/api/test_webhook.py b/tests/integration/api/test_webhook.py new file mode 100644 index 000000000..f8d546f8a --- /dev/null +++ b/tests/integration/api/test_webhook.py @@ -0,0 +1,547 @@ +# Copyright 2024 Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import base64 +import hashlib +import hmac +import secrets +import uuid +from unittest import mock +from urllib.parse import urlencode + +import pytest +import yaml +from rest_framework import status +from rest_framework.renderers import JSONRenderer +from rest_framework.test import APIClient + +from aap_eda.core import models +from aap_eda.core.enums import Action, ResourceType +from tests.integration.constants import api_url_v1 + + +@pytest.mark.django_db +def test_list_webhooks( + client: APIClient, + check_permission_mock: mock.Mock, + default_user: models.User, +): + webhooks = models.Webhook.objects.bulk_create( + [ + models.Webhook( + uuid=uuid.uuid4(), + name="test-webhook-1", + user=default_user, + url="http://www.example.com", + secret="secret", + ), + models.Webhook( + uuid=uuid.uuid4(), + name="test-webhook-2", + user=default_user, + url="http://www.example.com", + secret="secret", + ), + ] + ) + + response = client.get(f"{api_url_v1}/webhooks/") + assert response.status_code == status.HTTP_200_OK + assert len(response.data["results"]) == 2 + assert response.data["results"][0]["name"] == webhooks[0].name + assert response.data["results"][0]["user"] == "luke.skywalker" + + check_permission_mock.assert_called_once_with( + mock.ANY, mock.ANY, ResourceType.WEBHOOK, Action.READ + ) + + +@pytest.mark.django_db +def test_retrieve_webhook( + client: APIClient, + check_permission_mock: mock.Mock, + default_user: models.User, +): + webhook = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + user=default_user, + url="http://www.example.com", + secret="secret", + ) + + response = client.get(f"{api_url_v1}/webhooks/{webhook.id}/") + assert response.status_code == status.HTTP_200_OK + assert response.data["name"] == webhook.name + assert response.data["url"] == webhook.url + assert response.data["user"] == "luke.skywalker" + + check_permission_mock.assert_called_once_with( + mock.ANY, mock.ANY, ResourceType.WEBHOOK, Action.READ + ) + + 
+@pytest.mark.django_db +def test_create_webhook( + client: APIClient, + check_permission_mock: mock.Mock, +): + data_in = { + "name": "test_webhook", + "header_key": "xyz", + "hmac_signature_prefix": "xyz=", + "secret": "some secret value", + } + response = client.post(f"{api_url_v1}/webhooks/", data=data_in) + assert response.status_code == status.HTTP_201_CREATED + result = response.data + assert result["name"] == "test_webhook" + assert result["user"] == "test.admin" + assert result["header_key"] == "xyz" + assert result["hmac_signature_prefix"] == "xyz=" + + check_permission_mock.assert_called_once_with( + mock.ANY, mock.ANY, ResourceType.WEBHOOK, Action.CREATE + ) + + +@pytest.mark.django_db +def test_post_webhook( + base_client: APIClient, + check_permission_mock: mock.Mock, + default_user: models.User, +): + secret = secrets.token_hex(32) + signature_header_name = "my-secret-header" + webhook = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + user=default_user, + url="http://www.example.com", + secret=secret, + header_key=signature_header_name, + ) + data = {"a": 1, "b": 2} + data_bytes = JSONRenderer().render(data) + hash_object = hmac.new( + secret.encode("utf-8"), msg=data_bytes, digestmod=hashlib.sha256 + ) + headers = {signature_header_name: hash_object.hexdigest()} + response = base_client.post( + f"{api_url_v1}/external_webhook/{webhook.uuid}/post/", + headers=headers, + data=data, + ) + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.django_db +def test_post_webhook_bad_secret( + base_client: APIClient, + check_permission_mock: mock.Mock, + default_user: models.User, +): + secret = secrets.token_hex(32) + signature_header_name = "my-secret-header" + webhook = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + user=default_user, + url="http://www.example.com", + secret=secret, + header_key=signature_header_name, + test_mode=True, + ) + bad_secret = secrets.token_hex(32) + 
data = {"a": 1, "b": 2} + data_bytes = JSONRenderer().render(data) + hash_object = hmac.new( + bad_secret.encode("utf-8"), msg=data_bytes, digestmod=hashlib.sha256 + ) + headers = {signature_header_name: hash_object.hexdigest()} + response = base_client.post( + f"{api_url_v1}/external_webhook/{webhook.uuid}/post/", + headers=headers, + data=data, + ) + assert response.status_code == status.HTTP_403_FORBIDDEN + webhook.refresh_from_db() + assert ( + webhook.test_error_message + == "Signature mismatch, check your payload and secret" + ) + + +@pytest.mark.django_db +def test_post_webhook_with_prefix( + base_client: APIClient, + check_permission_mock: mock.Mock, + default_user: models.User, +): + secret = secrets.token_hex(32) + signature_header_name = "my-secret-header" + hmac_signature_prefix = "sha256=" + webhook = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + user=default_user, + url="http://www.example.com", + secret=secret, + header_key=signature_header_name, + hmac_signature_prefix=hmac_signature_prefix, + ) + data = {"a": 1, "b": 2} + data_bytes = JSONRenderer().render(data) + hash_object = hmac.new( + secret.encode("utf-8"), msg=data_bytes, digestmod=hashlib.sha256 + ) + headers = { + signature_header_name: f"{hmac_signature_prefix}" + f"{hash_object.hexdigest()}" + } + response = base_client.post( + f"{api_url_v1}/external_webhook/{webhook.uuid}/post/", + headers=headers, + data=data, + ) + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.django_db +def test_post_webhook_with_test_mode( + base_client: APIClient, + check_permission_mock: mock.Mock, + default_user: models.User, +): + secret = secrets.token_hex(32) + signature_header_name = "my-secret-header" + hmac_signature_prefix = "sha256=" + webhook = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + user=default_user, + url="http://www.example.com", + secret=secret, + header_key=signature_header_name, + 
hmac_signature_prefix=hmac_signature_prefix, + test_mode=True, + ) + data = {"a": 1, "b": 2} + data_bytes = JSONRenderer().render(data) + hash_object = hmac.new( + secret.encode("utf-8"), msg=data_bytes, digestmod=hashlib.sha256 + ) + headers = { + signature_header_name: ( + f"{hmac_signature_prefix}" f"{hash_object.hexdigest()}" + ) + } + response = base_client.post( + f"{api_url_v1}/external_webhook/{webhook.uuid}/post/", + headers=headers, + data=data, + ) + assert response.status_code == status.HTTP_200_OK + + webhook.refresh_from_db() + test_data = yaml.safe_load(webhook.test_content) + assert test_data["a"] == 1 + assert test_data["b"] == 2 + assert test_data["eda_webhook_name"] == "test-webhook-1" + assert webhook.test_content_type == "application/json" + + +@pytest.mark.django_db +def test_create_webhook_bad_algorithm( + client: APIClient, + check_permission_mock: mock.Mock, +): + data_in = { + "name": "test_webhook", + "header_key": "xyz", + "hmac_signature_prefix": "xyz=", + "hmac_algorithm": "nada", + } + response = client.post(f"{api_url_v1}/webhooks/", data=data_in) + assert response.status_code == status.HTTP_400_BAD_REQUEST + result = response.data + assert "Invalid hash algorithm nada" in str(result["hmac_algorithm"][0]) + + +@pytest.mark.django_db +def test_create_webhook_bad_format( + client: APIClient, + check_permission_mock: mock.Mock, +): + data_in = { + "name": "test_webhook", + "header_key": "xyz", + "hmac_signature_prefix": "xyz=", + "hmac_format": "nada", + } + response = client.post(f"{api_url_v1}/webhooks/", data=data_in) + assert response.status_code == status.HTTP_400_BAD_REQUEST + result = response.data + assert "Invalid hash format nada" in str(result["hmac_format"][0]) + + +@pytest.mark.django_db +def test_delete_webhook( + client: APIClient, + check_permission_mock: mock.Mock, + default_user: models.User, +): + webhook = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + user=default_user, + 
url="http://www.example.com", + secret="secret", + ) + + response = client.delete(f"{api_url_v1}/webhooks/{webhook.id}/") + assert response.status_code == status.HTTP_204_NO_CONTENT + + check_permission_mock.assert_called_once_with( + mock.ANY, mock.ANY, ResourceType.WEBHOOK, Action.DELETE + ) + + +@pytest.mark.django_db +def test_update_webhook( + client: APIClient, + check_permission_mock: mock.Mock, + default_user: models.User, +): + webhook = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + user=default_user, + url="http://www.example.com", + secret="secret", + ) + new_secret = "gobbledegook" + response = client.patch( + f"{api_url_v1}/webhooks/{webhook.id}/", + data={"test_mode": True, "secret": new_secret}, + ) + assert response.status_code == status.HTTP_200_OK + + check_permission_mock.assert_called_once_with( + mock.ANY, mock.ANY, ResourceType.WEBHOOK, Action.UPDATE + ) + webhook.refresh_from_db() + assert webhook.test_mode + assert webhook.secret.get_secret_value() == new_secret + + +@pytest.mark.django_db +def test_post_webhook_with_bad_uuid( + base_client: APIClient, + check_permission_mock: mock.Mock, + default_user: models.User, +): + secret = secrets.token_hex(32) + signature_header_name = "my-secret-header" + hmac_signature_prefix = "sha256=" + models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + user=default_user, + url="http://www.example.com", + secret=secret, + header_key=signature_header_name, + hmac_signature_prefix=hmac_signature_prefix, + ) + data = {"a": 1, "b": 2} + data_bytes = JSONRenderer().render(data) + hash_object = hmac.new( + secret.encode("utf-8"), msg=data_bytes, digestmod=hashlib.sha256 + ) + headers = { + signature_header_name: f"{hmac_signature_prefix}" + f"{hash_object.hexdigest()}" + } + response = base_client.post( + f"{api_url_v1}/external_webhook/{str(uuid.uuid4())}/post/", + headers=headers, + data=data, + ) + assert response.status_code == 
status.HTTP_400_BAD_REQUEST + + +@pytest.mark.django_db +def test_post_webhook_with_form_urlencoded( + base_client: APIClient, + check_permission_mock: mock.Mock, + default_user: models.User, +): + secret = secrets.token_hex(32) + signature_header_name = "my-secret-header" + hmac_signature_prefix = "sha256=" + webhook = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + user=default_user, + url="http://www.example.com", + secret=secret, + header_key=signature_header_name, + hmac_signature_prefix=hmac_signature_prefix, + ) + data = {"a": 1, "b": 2} + content_type = "application/x-www-form-urlencoded" + data_bytes = urlencode(data).encode() + hash_object = hmac.new( + secret.encode("utf-8"), msg=data_bytes, digestmod=hashlib.sha256 + ) + headers = { + signature_header_name: ( + f"{hmac_signature_prefix}" f"{hash_object.hexdigest()}" + ), + "Content-Type": content_type, + } + response = base_client.post( + f"{api_url_v1}/external_webhook/{webhook.uuid}/post/", + headers=headers, + data=data_bytes, + content_type=content_type, + ) + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.django_db +def test_post_webhook_with_base64_format( + base_client: APIClient, + check_permission_mock: mock.Mock, + default_user: models.User, +): + secret = secrets.token_hex(32) + signature_header_name = "my-secret-header" + hmac_signature_prefix = "sha256=" + webhook = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + user=default_user, + url="http://www.example.com", + secret=secret, + header_key=signature_header_name, + hmac_signature_prefix=hmac_signature_prefix, + hmac_format="base64", + ) + data = {"a": 1, "b": 2} + content_type = "application/x-www-form-urlencoded" + data_bytes = urlencode(data).encode() + hash_object = hmac.new( + secret.encode("utf-8"), msg=data_bytes, digestmod=hashlib.sha256 + ) + b64_signature = base64.b64encode(hash_object.digest()).decode() + headers = { + signature_header_name: 
(f"{hmac_signature_prefix}" f"{b64_signature}"), + "Content-Type": content_type, + } + response = base_client.post( + f"{api_url_v1}/external_webhook/{webhook.uuid}/post/", + headers=headers, + data=data_bytes, + content_type=content_type, + ) + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.django_db +def test_post_webhook_with_token( + base_client: APIClient, + check_permission_mock: mock.Mock, + default_user: models.User, +): + secret = secrets.token_hex(32) + signature_header_name = "my-secret-header" + webhook = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + user=default_user, + url="http://www.example.com", + secret=secret, + header_key=signature_header_name, + auth_type="token", + ) + data = {"a": 1, "b": 2} + content_type = "application/x-www-form-urlencoded" + data_bytes = urlencode(data).encode() + headers = { + signature_header_name: secret, + "Content-Type": content_type, + } + response = base_client.post( + f"{api_url_v1}/external_webhook/{webhook.uuid}/post/", + headers=headers, + data=data_bytes, + content_type=content_type, + ) + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.django_db +def test_post_webhook_with_test_mode_extra_headers( + base_client: APIClient, + check_permission_mock: mock.Mock, + default_user: models.User, +): + secret = secrets.token_hex(32) + signature_header_name = "X-Gitlab-Token" + webhook = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + user=default_user, + url="http://www.example.com", + auth_type="token", + secret=secret, + header_key=signature_header_name, + test_mode=True, + additional_data_headers=[ + "X-Gitlab-Event", + "X-Gitlab-Event-UUID", + "X-Gitlab-Webhook-UUID", + ], + ) + data = {"a": 1, "b": 2} + headers = { + "X-Gitlab-Event-UUID": "c2675c66-7e6e-4fe2-9ac3-288534ef34b9", + "X-Gitlab-Instance": "https://gitlab.com", + signature_header_name: secret, + "X-Gitlab-Webhook-UUID": 
"b697868f-3b59-4a1f-985d-47f79e2b05ff", + "X-Gitlab-Event": "Push Hook", + } + + response = base_client.post( + f"{api_url_v1}/external_webhook/{webhook.uuid}/post/", + headers=headers, + data=data, + ) + assert response.status_code == status.HTTP_200_OK + + webhook.refresh_from_db() + test_data = yaml.safe_load(webhook.test_content) + assert test_data["a"] == 1 + assert test_data["b"] == 2 + assert ( + test_data["X-Gitlab-Event-UUID"] + == "c2675c66-7e6e-4fe2-9ac3-288534ef34b9" + ) + assert ( + test_data["X-Gitlab-Webhook-UUID"] + == "b697868f-3b59-4a1f-985d-47f79e2b05ff" + ) + assert test_data["X-Gitlab-Event"] == "Push Hook" + assert test_data["eda_webhook_name"] == "test-webhook-1" + assert webhook.test_content_type == "application/json" diff --git a/tools/docker/docker-compose-mac.yml b/tools/docker/docker-compose-mac.yml index 0fe1633b8..8b99f3e24 100644 --- a/tools/docker/docker-compose-mac.yml +++ b/tools/docker/docker-compose-mac.yml @@ -21,6 +21,12 @@ x-environment: - EDA_MAX_RUNNING_ACTIVATIONS=${EDA_MAX_RUNNING_ACTIVATIONS:-5} - EDA_ACTIVATION_RESTART_SECONDS_ON_COMPLETE=${EDA_ACTIVATION_RESTART_SECONDS_ON_COMPLETE:-60} - EDA_ACTIVATION_RESTART_SECONDS_ON_FAILURE=${EDA_ACTIVATION_RESTART_SECONDS_ON_FAILURE:-60} + - EDA_SERVER_UUID=edgecafe-beef-feed-fade-decadeedgecafe + - EDA_PG_NOTIFY_DSN_CLIENT="host=host.containers.internal port=5432 dbname=eda user=postgres password=secret" + - EDA_PG_NOTIFY_DSN="host=postgres port=5432 dbname=eda user=postgres password=secret" + - EDA_WEBHOOK_URL_PREFIX=${EDA_WEBHOOK_URL_PREFIX:-https://my_nginix_server/edgecafe-beef-feed-fade-decadeedgecafe/api/eda/v1/external_webhook} + - EDA_WEBHOOK_HOST=${EDA_WEBHOOK_HOST:-eda-webhook-api:8000} + - EDA_WEBHOOK_SERVER=http://${EDA_WEBHOOK_HOST:-eda-webhook-api:8000} services: eda-ui: @@ -29,7 +35,7 @@ services: ports: - '${EDA_UI_PORT:-8443}:443' depends_on: - eda-api: + eda-webhook-api: condition: service_healthy eda-api: @@ -165,5 +171,26 @@ services: retries: 3 start_period: 
5s + eda-webhook-api: + image: "${EDA_IMAGE:-quay.io/ansible/eda-server:main}" + environment: *common-env + security_opt: + - label=disable + command: + - /bin/bash + - -c + - aap-eda-manage runserver 0.0.0.0:8000 + ports: + - '8555:8000' + depends_on: + eda-api: + condition: service_healthy + healthcheck: + test: [ 'CMD', 'curl', '-q', 'http://0.0.0.0:8000/_healthz' ] + interval: 30s + timeout: 5s + retries: 10 + start_period: 15s + volumes: postgres_data: {} diff --git a/tools/docker/docker-compose-stage.yaml b/tools/docker/docker-compose-stage.yaml index bd267396f..8a70ccf65 100644 --- a/tools/docker/docker-compose-stage.yaml +++ b/tools/docker/docker-compose-stage.yaml @@ -22,6 +22,12 @@ x-environment: - EDA_MAX_RUNNING_ACTIVATIONS=${EDA_MAX_RUNNING_ACTIVATIONS:-5} - EDA_ACTIVATION_RESTART_SECONDS_ON_COMPLETE=${EDA_ACTIVATION_RESTART_SECONDS_ON_COMPLETE:-60} - EDA_ACTIVATION_RESTART_SECONDS_ON_FAILURE=${EDA_ACTIVATION_RESTART_SECONDS_ON_FAILURE:-60} + - EDA_SERVER_UUID=edgecafe-beef-feed-fade-decadeedgecafe + - EDA_PG_NOTIFY_DSN_CLIENT="host=host.containers.internal port=5432 dbname=eda user=postgres password=secret" + - EDA_PG_NOTIFY_DSN="host=postgres port=5432 dbname=eda user=postgres password=secret" + - EDA_WEBHOOK_URL_PREFIX=${EDA_WEBHOOK_URL_PREFIX:-https://my_nginix_server/edgecafe-beef-feed-fade-decadeedgecafe/api/eda/v1/external_webhook} + - EDA_WEBHOOK_HOST=${EDA_WEBHOOK_HOST:-eda-webhook-api:8000} + - EDA_WEBHOOK_SERVER=http://${EDA_WEBHOOK_HOST:-eda-webhook-api:8000} services: podman-pre-setup: @@ -52,7 +58,7 @@ services: ports: - '8443:443' depends_on: - eda-api: + eda-webhook-api: condition: service_healthy eda-api: @@ -178,6 +184,24 @@ services: retries: 3 start_period: 5s + eda-webhook-api: + image: "${EDA_IMAGE:-quay.io/ansible/eda-server:main}" + environment: *common-env + command: + - /bin/bash + - -c + - aap-eda-manage runserver 0.0.0.0:8000 + ports: + - '8555:8000' + depends_on: + eda-api: + condition: service_healthy + healthcheck: + 
test: [ 'CMD', 'curl', '-q', 'http://localhost:8000/_healthz' ] + interval: 30s + timeout: 5s + retries: 10 + volumes: postgres_data: {} podman_data: {}