From ab3bf70b367a1213a60e4760a67a32a8c70ddc33 Mon Sep 17 00:00:00 2001 From: Madhu Kanoor Date: Tue, 9 Jan 2024 20:38:15 -0500 Subject: [PATCH] feat: activations with webhooks --- poetry.lock | 167 +++++- pyproject.toml | 4 + src/aap_eda/api/exceptions.py | 7 + src/aap_eda/api/filters/webhook.py | 29 + src/aap_eda/api/serializers/__init__.py | 4 + src/aap_eda/api/serializers/activation.py | 85 ++- src/aap_eda/api/serializers/webhook.py | 122 ++++ src/aap_eda/api/urls.py | 6 + src/aap_eda/api/views/__init__.py | 6 + src/aap_eda/api/views/external_webhook.py | 207 +++++++ src/aap_eda/api/views/webhook.py | 198 +++++++ src/aap_eda/api/webhook_authentication.py | 178 ++++++ src/aap_eda/core/enums.py | 20 + .../commands/create_initial_data.py | 6 + ...webhooks_webhook_ix_webhook_id_and_more.py | 263 +++++++++ src/aap_eda/core/models/__init__.py | 3 + src/aap_eda/core/models/activation.py | 2 + src/aap_eda/core/models/webhook.py | 179 ++++++ src/aap_eda/core/validators.py | 44 ++ src/aap_eda/services/pg_notify.py | 100 ++++ src/aap_eda/settings/default.py | 24 + .../api/test_activation_with_webhook.py | 227 +++++++ tests/integration/api/test_webhook.py | 554 ++++++++++++++++++ tests/integration/conftest.py | 40 ++ tools/docker/docker-compose-dev.yaml | 29 +- tools/docker/docker-compose-mac.yml | 30 +- tools/docker/docker-compose-stage.yaml | 30 +- 27 files changed, 2556 insertions(+), 8 deletions(-) create mode 100644 src/aap_eda/api/filters/webhook.py create mode 100644 src/aap_eda/api/serializers/webhook.py create mode 100644 src/aap_eda/api/views/external_webhook.py create mode 100644 src/aap_eda/api/views/webhook.py create mode 100644 src/aap_eda/api/webhook_authentication.py create mode 100644 src/aap_eda/core/migrations/0041_webhook_activation_webhooks_webhook_ix_webhook_id_and_more.py create mode 100644 src/aap_eda/core/models/webhook.py create mode 100644 src/aap_eda/services/pg_notify.py create mode 100644 tests/integration/api/test_activation_with_webhook.py create mode 100644 tests/integration/api/test_webhook.py diff --git a/poetry.lock b/poetry.lock index c2db20e7f..65e4bbfed 100644 --- a/poetry.lock +++ b/poetry.lock @@ -605,7 +605,7 @@ bcrypt = ["bcrypt"] [[package]] name = "django-ansible-base" -version = "2024.4.25" +version = "2024.5.31" description = "A Django app used by ansible services" optional = false python-versions = ">=3.9" @@ -622,11 +622,12 @@ djangorestframework = "*" inflection = "*" [package.extras] -all = ["channels", "cryptography", "django-auth-ldap", "django-redis", "drf-spectacular", "pyjwt", "pytest", "pytest-django", "python-ldap", "python3-saml", "redis", "requests", "social-auth-app-django", "tabulate", "tacacs_plus", "xmlsec (==1.3.13)"] +all = ["channels", "cryptography", "django-auth-ldap", "django-oauth-toolkit (<2.4.0)", "django-redis", "drf-spectacular", "pyjwt", "pyrad", "pytest", "pytest-django", "python-ldap", "python3-saml", "redis", "requests", "social-auth-app-django", "tabulate", "tacacs_plus", "xmlsec (==1.3.13)"] api-documentation = ["drf-spectacular"] -authentication = ["django-auth-ldap", "python-ldap", "python3-saml", "social-auth-app-django", "tabulate", "tacacs_plus", "xmlsec (==1.3.13)"] +authentication = ["django-auth-ldap", "pyrad", "python-ldap", "python3-saml", "social-auth-app-django", "tabulate", "tacacs_plus", "xmlsec (==1.3.13)"] channel-auth = ["channels"] jwt-consumer = ["pyjwt", "requests"] +oauth2-provider = ["django-oauth-toolkit (<2.4.0)"] redis-client = ["django-redis", "redis"] testing = ["cryptography", "pytest", 
"pytest-django"] @@ -788,6 +789,24 @@ toml = ["toml"] vault = ["hvac"] yaml = ["ruamel.yaml"] +[[package]] +name = "ecdsa" +version = "0.19.0" +description = "ECDSA cryptographic signature library (pure python)" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.6" +files = [ + {file = "ecdsa-0.19.0-py2.py3-none-any.whl", hash = "sha256:2cea9b88407fdac7bbeca0833b189e4c9c53f2ef1e1eaa29f6224dbc809b707a"}, + {file = "ecdsa-0.19.0.tar.gz", hash = "sha256:60eaad1199659900dd0af521ed462b793bbdf867432b3948e87416ae4caf6bf8"}, +] + +[package.dependencies] +six = ">=1.9.0" + +[package.extras] +gmpy = ["gmpy"] +gmpy2 = ["gmpy2"] + [[package]] name = "eradicate" version = "2.2.0" @@ -1430,6 +1449,29 @@ files = [ [package.dependencies] wcwidth = "*" +[[package]] +name = "psycopg" +version = "3.1.19" +description = "PostgreSQL database adapter for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "psycopg-3.1.19-py3-none-any.whl", hash = "sha256:dca5e5521c859f6606686432ae1c94e8766d29cc91f2ee595378c510cc5b0731"}, + {file = "psycopg-3.1.19.tar.gz", hash = "sha256:92d7b78ad82426cdcf1a0440678209faa890c6e1721361c2f8901f0dccd62961"}, +] + +[package.dependencies] +typing-extensions = ">=4.1" +tzdata = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +binary = ["psycopg-binary (==3.1.19)"] +c = ["psycopg-c (==3.1.19)"] +dev = ["black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.4.1)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] +docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] +pool = ["psycopg-pool"] +test = ["anyio (>=3.6.2,<4.0)", "mypy (>=1.4.1)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] + [[package]] name = "psycopg2" version = "2.9.6" @@ -2351,6 +2393,123 @@ docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] +[[package]] +name = "xxhash" +version = "3.4.1" +description = "Python binding for xxHash" +optional = false +python-versions = ">=3.7" +files = [ + {file = "xxhash-3.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:91dbfa55346ad3e18e738742236554531a621042e419b70ad8f3c1d9c7a16e7f"}, + {file = "xxhash-3.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:665a65c2a48a72068fcc4d21721510df5f51f1142541c890491afc80451636d2"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb11628470a6004dc71a09fe90c2f459ff03d611376c1debeec2d648f44cb693"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bef2a7dc7b4f4beb45a1edbba9b9194c60a43a89598a87f1a0226d183764189"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0f7b2d547d72c7eda7aa817acf8791f0146b12b9eba1d4432c531fb0352228"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00f2fdef6b41c9db3d2fc0e7f94cb3db86693e5c45d6de09625caad9a469635b"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23cfd9ca09acaf07a43e5a695143d9a21bf00f5b49b15c07d5388cadf1f9ce11"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6a9ff50a3cf88355ca4731682c168049af1ca222d1d2925ef7119c1a78e95b3b"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:f1d7c69a1e9ca5faa75546fdd267f214f63f52f12692f9b3a2f6467c9e67d5e7"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:672b273040d5d5a6864a36287f3514efcd1d4b1b6a7480f294c4b1d1ee1b8de0"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4178f78d70e88f1c4a89ff1ffe9f43147185930bb962ee3979dba15f2b1cc799"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9804b9eb254d4b8cc83ab5a2002128f7d631dd427aa873c8727dba7f1f0d1c2b"}, + {file = "xxhash-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c09c49473212d9c87261d22c74370457cfff5db2ddfc7fd1e35c80c31a8c14ce"}, + {file = "xxhash-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:ebbb1616435b4a194ce3466d7247df23499475c7ed4eb2681a1fa42ff766aff6"}, + {file = "xxhash-3.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:25dc66be3db54f8a2d136f695b00cfe88018e59ccff0f3b8f545869f376a8a46"}, + {file = "xxhash-3.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58c49083801885273e262c0f5bbeac23e520564b8357fbb18fb94ff09d3d3ea5"}, + {file = "xxhash-3.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b526015a973bfbe81e804a586b703f163861da36d186627e27524f5427b0d520"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36ad4457644c91a966f6fe137d7467636bdc51a6ce10a1d04f365c70d6a16d7e"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:248d3e83d119770f96003271fe41e049dd4ae52da2feb8f832b7a20e791d2920"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2070b6d5bbef5ee031666cf21d4953c16e92c2f8a24a94b5c240f8995ba3b1d0"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2746035f518f0410915e247877f7df43ef3372bf36cfa52cc4bc33e85242641"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ba6181514681c2591840d5632fcf7356ab287d4aff1c8dea20f3c78097088"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aac5010869240e95f740de43cd6a05eae180c59edd182ad93bf12ee289484fa"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4cb11d8debab1626181633d184b2372aaa09825bde709bf927704ed72765bed1"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b29728cff2c12f3d9f1d940528ee83918d803c0567866e062683f300d1d2eff3"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a15cbf3a9c40672523bdb6ea97ff74b443406ba0ab9bca10ceccd9546414bd84"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e66df260fed01ed8ea790c2913271641c58481e807790d9fca8bfd5a3c13844"}, + {file = "xxhash-3.4.1-cp311-cp311-win32.whl", hash = "sha256:e867f68a8f381ea12858e6d67378c05359d3a53a888913b5f7d35fbf68939d5f"}, + {file = "xxhash-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:200a5a3ad9c7c0c02ed1484a1d838b63edcf92ff538770ea07456a3732c577f4"}, + {file = "xxhash-3.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:1d03f1c0d16d24ea032e99f61c552cb2b77d502e545187338bea461fde253583"}, + {file = "xxhash-3.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c4bbba9b182697a52bc0c9f8ec0ba1acb914b4937cd4a877ad78a3b3eeabefb3"}, + {file = "xxhash-3.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9fd28a9da300e64e434cfc96567a8387d9a96e824a9be1452a1e7248b7763b78"}, + {file = 
"xxhash-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6066d88c9329ab230e18998daec53d819daeee99d003955c8db6fc4971b45ca3"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93805bc3233ad89abf51772f2ed3355097a5dc74e6080de19706fc447da99cd3"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64da57d5ed586ebb2ecdde1e997fa37c27fe32fe61a656b77fabbc58e6fbff6e"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a97322e9a7440bf3c9805cbaac090358b43f650516486746f7fa482672593df"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbe750d512982ee7d831838a5dee9e9848f3fb440e4734cca3f298228cc957a6"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fd79d4087727daf4d5b8afe594b37d611ab95dc8e29fe1a7517320794837eb7d"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:743612da4071ff9aa4d055f3f111ae5247342931dedb955268954ef7201a71ff"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b41edaf05734092f24f48c0958b3c6cbaaa5b7e024880692078c6b1f8247e2fc"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a90356ead70d715fe64c30cd0969072de1860e56b78adf7c69d954b43e29d9fa"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ac56eebb364e44c85e1d9e9cc5f6031d78a34f0092fea7fc80478139369a8b4a"}, + {file = "xxhash-3.4.1-cp312-cp312-win32.whl", hash = "sha256:911035345932a153c427107397c1518f8ce456f93c618dd1c5b54ebb22e73747"}, + {file = "xxhash-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:f31ce76489f8601cc7b8713201ce94b4bd7b7ce90ba3353dccce7e9e1fee71fa"}, + {file = "xxhash-3.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:b5beb1c6a72fdc7584102f42c4d9df232ee018ddf806e8c90906547dfb43b2da"}, + {file = "xxhash-3.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6d42b24d1496deb05dee5a24ed510b16de1d6c866c626c2beb11aebf3be278b9"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b685fab18876b14a8f94813fa2ca80cfb5ab6a85d31d5539b7cd749ce9e3624"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419ffe34c17ae2df019a4685e8d3934d46b2e0bbe46221ab40b7e04ed9f11137"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e041ce5714f95251a88670c114b748bca3bf80cc72400e9f23e6d0d59cf2681"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc860d887c5cb2f524899fb8338e1bb3d5789f75fac179101920d9afddef284b"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:312eba88ffe0a05e332e3a6f9788b73883752be63f8588a6dc1261a3eaaaf2b2"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e01226b6b6a1ffe4e6bd6d08cfcb3ca708b16f02eb06dd44f3c6e53285f03e4f"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9f3025a0d5d8cf406a9313cd0d5789c77433ba2004b1c75439b67678e5136537"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:6d3472fd4afef2a567d5f14411d94060099901cd8ce9788b22b8c6f13c606a93"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:43984c0a92f06cac434ad181f329a1445017c33807b7ae4f033878d860a4b0f2"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a55e0506fdb09640a82ec4f44171273eeabf6f371a4ec605633adb2837b5d9d5"}, + {file = "xxhash-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:faec30437919555b039a8bdbaba49c013043e8f76c999670aef146d33e05b3a0"}, + {file = "xxhash-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:c9e1b646af61f1fc7083bb7b40536be944f1ac67ef5e360bca2d73430186971a"}, + {file = "xxhash-3.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:961d948b7b1c1b6c08484bbce3d489cdf153e4122c3dfb07c2039621243d8795"}, + {file = "xxhash-3.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:719a378930504ab159f7b8e20fa2aa1896cde050011af838af7e7e3518dd82de"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74fb5cb9406ccd7c4dd917f16630d2e5e8cbbb02fc2fca4e559b2a47a64f4940"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dab508ac39e0ab988039bc7f962c6ad021acd81fd29145962b068df4148c476"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c59f3e46e7daf4c589e8e853d700ef6607afa037bfad32c390175da28127e8c"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cc07256eff0795e0f642df74ad096f8c5d23fe66bc138b83970b50fc7f7f6c5"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9f749999ed80f3955a4af0eb18bb43993f04939350b07b8dd2f44edc98ffee9"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7688d7c02149a90a3d46d55b341ab7ad1b4a3f767be2357e211b4e893efbaaf6"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a8b4977963926f60b0d4f830941c864bed16aa151206c01ad5c531636da5708e"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8106d88da330f6535a58a8195aa463ef5281a9aa23b04af1848ff715c4398fb4"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4c76a77dbd169450b61c06fd2d5d436189fc8ab7c1571d39265d4822da16df22"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:11f11357c86d83e53719c592021fd524efa9cf024dc7cb1dfb57bbbd0d8713f2"}, + {file = "xxhash-3.4.1-cp38-cp38-win32.whl", hash = "sha256:0c786a6cd74e8765c6809892a0d45886e7c3dc54de4985b4a5eb8b630f3b8e3b"}, + {file = "xxhash-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:aabf37fb8fa27430d50507deeab2ee7b1bcce89910dd10657c38e71fee835594"}, + {file = "xxhash-3.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6127813abc1477f3a83529b6bbcfeddc23162cece76fa69aee8f6a8a97720562"}, + {file = "xxhash-3.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef2e194262f5db16075caea7b3f7f49392242c688412f386d3c7b07c7733a70a"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71be94265b6c6590f0018bbf73759d21a41c6bda20409782d8117e76cd0dfa8b"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10e0a619cdd1c0980e25eb04e30fe96cf8f4324758fa497080af9c21a6de573f"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa122124d2e3bd36581dd78c0efa5f429f5220313479fb1072858188bc2d5ff1"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e17032f5a4fea0a074717fe33477cb5ee723a5f428de7563e75af64bfc1b1e10"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca7783b20e3e4f3f52f093538895863f21d18598f9a48211ad757680c3bd006f"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d77d09a1113899fad5f354a1eb4f0a9afcf58cefff51082c8ad643ff890e30cf"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:21287bcdd299fdc3328cc0fbbdeaa46838a1c05391264e51ddb38a3f5b09611f"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:dfd7a6cc483e20b4ad90224aeb589e64ec0f31e5610ab9957ff4314270b2bf31"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:543c7fcbc02bbb4840ea9915134e14dc3dc15cbd5a30873a7a5bf66039db97ec"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fe0a98d990e433013f41827b62be9ab43e3cf18e08b1483fcc343bda0d691182"}, + {file = "xxhash-3.4.1-cp39-cp39-win32.whl", hash = "sha256:b9097af00ebf429cc7c0e7d2fdf28384e4e2e91008130ccda8d5ae653db71e54"}, + {file = "xxhash-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:d699b921af0dcde50ab18be76c0d832f803034d80470703700cb7df0fbec2832"}, + {file = "xxhash-3.4.1-cp39-cp39-win_arm64.whl", hash = "sha256:2be491723405e15cc099ade1280133ccfbf6322d2ef568494fb7d07d280e7eee"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:431625fad7ab5649368c4849d2b49a83dc711b1f20e1f7f04955aab86cd307bc"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc6dbd5fc3c9886a9e041848508b7fb65fd82f94cc793253990f81617b61fe49"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ff8dbd0ec97aec842476cb8ccc3e17dd288cd6ce3c8ef38bff83d6eb927817"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef73a53fe90558a4096e3256752268a8bdc0322f4692ed928b6cd7ce06ad4fe3"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:450401f42bbd274b519d3d8dcf3c57166913381a3d2664d6609004685039f9d3"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a162840cf4de8a7cd8720ff3b4417fbc10001eefdd2d21541a8226bb5556e3bb"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b736a2a2728ba45017cb67785e03125a79d246462dfa892d023b827007412c52"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d0ae4c2e7698adef58710d6e7a32ff518b66b98854b1c68e70eee504ad061d8"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6322c4291c3ff174dcd104fae41500e75dad12be6f3085d119c2c8a80956c51"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:dd59ed668801c3fae282f8f4edadf6dc7784db6d18139b584b6d9677ddde1b6b"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:92693c487e39523a80474b0394645b393f0ae781d8db3474ccdcead0559ccf45"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4603a0f642a1e8d7f3ba5c4c25509aca6a9c1cc16f85091004a7028607ead663"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6fa45e8cbfbadb40a920fe9ca40c34b393e0b067082d94006f7f64e70c7490a6"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:595b252943b3552de491ff51e5bb79660f84f033977f88f6ca1605846637b7c6"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:562d8b8f783c6af969806aaacf95b6c7b776929ae26c0cd941d54644ea7ef51e"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:41ddeae47cf2828335d8d991f2d2b03b0bdc89289dc64349d712ff8ce59d0647"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c44d584afdf3c4dbb3277e32321d1a7b01d6071c1992524b6543025fb8f4206f"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd7bddb3a5b86213cc3f2c61500c16945a1b80ecd572f3078ddbbe68f9dabdfb"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ecb6c987b62437c2f99c01e97caf8d25660bf541fe79a481d05732e5236719c"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:696b4e18b7023527d5c50ed0626ac0520edac45a50ec7cf3fc265cd08b1f4c03"}, + {file = "xxhash-3.4.1.tar.gz", hash = "sha256:0379d6cf1ff987cd421609a264ce025e74f346e3e145dd106c0cc2e3ec3f99a9"}, +] + [[package]] name = "zope-interface" version = "6.0" @@ -2405,4 +2564,4 @@ dev = ["psycopg2-binary"] [metadata] lock-version = "2.0" python-versions = ">=3.11,<3.12" -content-hash = "efde77760ee3f6931ee130b87e4858d963e82dae51c9bfa842e3e2855d991060" +content-hash = "9cc05c72170d23d087227433831e23436a4da01a8f70ec1f5b49540de40f8cb8" diff --git a/pyproject.toml b/pyproject.toml index 9e8b40d99..e4982f004 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,6 +52,10 @@ django-split-settings = "^1.2.0" pexpect = "^4.9.0" ansible-runner = ">=2.3" python-gnupg = "^0.5.2" +psycopg = "^3.1.17" +xxhash = "*" +pyjwt = { version="*", extras = ["cryptography"] } +ecdsa = "*" [tool.poetry.group.test.dependencies] pytest = "*" diff --git a/src/aap_eda/api/exceptions.py b/src/aap_eda/api/exceptions.py index 7f3f778b1..b4aa9d5d4 100644 --- a/src/aap_eda/api/exceptions.py +++ b/src/aap_eda/api/exceptions.py @@ -125,3 +125,10 @@ class InvalidEventStreamRulebook(APIException): default_detail = ( "Configuration Error: Event stream template rulebook is invalid" ) + + +class InvalidWebhookSource(APIException): + status_code = status.HTTP_500_INTERNAL_SERVER_ERROR + default_detail = ( + "Configuration Error: Webhook source could not be upated in ruleset" + ) diff --git a/src/aap_eda/api/filters/webhook.py b/src/aap_eda/api/filters/webhook.py new file mode 100644 index 000000000..b4547c7bf --- /dev/null +++ b/src/aap_eda/api/filters/webhook.py @@ -0,0 +1,29 @@ +# Copyright 2024 Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import django_filters + +from aap_eda.core import models + + +class WebhookFilter(django_filters.FilterSet): + name = django_filters.CharFilter( + field_name="name", + lookup_expr="istartswith", + label="Filter by webhook name.", + ) + + class Meta: + model = models.Webhook + fields = ["name"] diff --git a/src/aap_eda/api/serializers/__init__.py b/src/aap_eda/api/serializers/__init__.py index b9b2b9bfc..9ed8ac854 100644 --- a/src/aap_eda/api/serializers/__init__.py +++ b/src/aap_eda/api/serializers/__init__.py @@ -78,6 +78,7 @@ UserListSerializer, UserSerializer, ) +from .webhook import WebhookInSerializer, WebhookOutSerializer __all__ = ( # auth @@ -131,4 +132,7 @@ "TeamCreateSerializer", "TeamUpdateSerializer", "TeamDetailSerializer", + # webhooks + "WebhookInSerializer", + "WebhookOutSerializer", ) diff --git a/src/aap_eda/api/serializers/activation.py b/src/aap_eda/api/serializers/activation.py index d113addd0..545d06acb 100644 --- a/src/aap_eda/api/serializers/activation.py +++ b/src/aap_eda/api/serializers/activation.py @@ -26,7 +26,10 @@ EDA_SERVER_VAULT_LABEL, PG_NOTIFY_TEMPLATE_RULEBOOK_DATA, ) -from aap_eda.api.exceptions import InvalidEventStreamRulebook +from aap_eda.api.exceptions import ( + InvalidEventStreamRulebook, + InvalidWebhookSource, +) from aap_eda.api.serializers.decision_environment import ( DecisionEnvironmentRefSerializer, ) @@ -39,6 +42,7 @@ ProjectRefSerializer, ) from aap_eda.api.serializers.rulebook import RulebookRefSerializer +from aap_eda.api.serializers.webhook import WebhookOutSerializer from aap_eda.api.vault import encrypt_string from aap_eda.core import models, validators from aap_eda.core.enums import DefaultCredentialType, ProcessParentType @@ -63,6 +67,36 @@ class VaultData: password_used: bool = False +def _update_webhook_source(validated_data: dict, vault_data: VaultData): + try: + vault_data.password_used = True + encrypted_dsn = encrypt_string( + password=vault_data.password, + plaintext=settings.PG_NOTIFY_DSN, + vault_id=EDA_SERVER_VAULT_LABEL, + ) + + channels = [] + for webhook_id in validated_data.get("webhooks"): + obj = models.Webhook.objects.get(id=webhook_id) + channels.append(obj.channel_name) + + sources_info = [ + { + "name": "webhook_event_stream", + "type": "ansible.eda.pg_listener", + "args": { + "dsn": encrypted_dsn, + "channels": channels, + }, + } + ] + return swap_sources(validated_data["rulebook_rulesets"], sources_info) + except Exception as e: + logger.error(f"Failed to update webhook source in rulesets: {e}") + raise InvalidWebhookSource(e) + + def _updated_ruleset(validated_data: dict, vault_data: VaultData): try: sources_info = [] @@ -215,6 +249,12 @@ class ActivationSerializer(serializers.ModelSerializer): child=EdaCredentialSerializer(), ) + webhooks = serializers.ListField( + required=False, + allow_null=True, + child=WebhookOutSerializer(), + ) + class Meta: model = models.Activation fields = [ @@ -241,6 +281,7 @@ class Meta: "event_streams", "eda_credentials", "log_level", + "webhooks", ] read_only_fields = [ "id", @@ -272,6 +313,11 @@ class ActivationListSerializer(serializers.ModelSerializer): allow_blank=True, help_text="Service name of the activation", ) + webhooks = serializers.ListField( + required=False, + allow_null=True, + child=WebhookOutSerializer(), + ) class Meta: model = models.Activation @@ -300,6 +346,7 @@ class Meta: "log_level", "eda_credentials", "k8s_service_name", + "webhooks", ] read_only_fields = ["id", "created_at", "modified_at"] @@ -320,6 +367,10 @@ def to_representation(self, 
activation): if activation.extra_var else None ) + webhooks = [ + WebhookOutSerializer(webhook).data + for webhook in activation.webhooks.all() + ] return { "id": activation.id, @@ -346,6 +397,7 @@ def to_representation(self, activation): "log_level": activation.log_level, "eda_credentials": eda_credentials, "k8s_service_name": activation.k8s_service_name, + "webhooks": webhooks, } @@ -369,6 +421,7 @@ class Meta: "log_level", "eda_credentials", "k8s_service_name", + "webhooks", ] organization_id = serializers.IntegerField( @@ -413,6 +466,12 @@ class Meta: allow_blank=True, validators=[validators.check_if_rfc_1035_compliant], ) + webhooks = serializers.ListField( + required=False, + allow_null=True, + child=serializers.IntegerField(), + validators=[validators.check_if_webhooks_exists], + ) def validate(self, data): _validate_credentials_and_token_and_rulebook(data=data, creating=True) @@ -440,6 +499,11 @@ def create(self, validated_data): validated_data, vault_data ) + if validated_data.get("webhooks", []): + validated_data["rulebook_rulesets"] = _update_webhook_source( + validated_data, vault_data + ) + vault = _get_vault_credential_type() if validated_data.get("eda_credentials"): @@ -529,6 +593,11 @@ class ActivationReadSerializer(serializers.ModelSerializer): allow_blank=True, help_text="Service name of the activation", ) + webhooks = serializers.ListField( + required=False, + allow_null=True, + child=WebhookOutSerializer(), + ) class Meta: model = models.Activation @@ -560,6 +629,7 @@ class Meta: "event_streams", "log_level", "k8s_service_name", + "webhooks", ] read_only_fields = ["id", "created_at", "modified_at", "restarted_at"] @@ -614,6 +684,10 @@ def to_representation(self, activation): if activation.extra_var else None ) + webhooks = [ + WebhookOutSerializer(webhook).data + for webhook in activation.webhooks.all() + ] return { "id": activation.id, @@ -645,6 +719,7 @@ def to_representation(self, activation): "log_level": activation.log_level, "eda_credentials": eda_credentials, "k8s_service_name": activation.k8s_service_name, + "webhooks": webhooks, } @@ -677,6 +752,12 @@ class PostActivationSerializer(serializers.ModelSerializer): allow_blank=True, validators=[validators.check_if_rfc_1035_compliant], ) + webhooks = serializers.ListField( + required=False, + allow_null=True, + child=serializers.IntegerField(), + validators=[validators.check_if_webhooks_exists], + ) def validate(self, data): _validate_credentials_and_token_and_rulebook(data=data, creating=False) @@ -699,6 +780,7 @@ class Meta: "rulebook_id", "eda_credentials", "k8s_service_name", + "webhooks", ] read_only_fields = [ "id", @@ -724,6 +806,7 @@ def is_activation_valid(activation: models.Activation) -> tuple[bool, str]: data["eda_credentials"] = [ obj.id for obj in activation.eda_credentials.all() ] + data["webhooks"] = [obj.id for obj in activation.webhooks.all()] serializer = PostActivationSerializer(data=data) valid = serializer.is_valid() diff --git a/src/aap_eda/api/serializers/webhook.py b/src/aap_eda/api/serializers/webhook.py new file mode 100644 index 000000000..4fa586b57 --- /dev/null +++ b/src/aap_eda/api/serializers/webhook.py @@ -0,0 +1,122 @@ +# Copyright 2024 Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Optional + +from rest_framework import serializers + +from aap_eda.api.serializers.organization import OrganizationRefSerializer +from aap_eda.core import models, validators + + +class WebhookInSerializer(serializers.ModelSerializer): + organization_id = serializers.IntegerField(required=False, allow_null=True) + owner = serializers.HiddenField(default=serializers.CurrentUserDefault()) + hmac_algorithm = serializers.CharField( + required=False, + allow_null=True, + allow_blank=True, + help_text="Hash algorithm to use", + validators=[validators.valid_hash_algorithm], + ) + hmac_format = serializers.CharField( + required=False, + allow_null=True, + allow_blank=True, + help_text="Hash format to use, hex or base64", + validators=[validators.valid_hash_format], + ) + auth_type = serializers.CharField( + help_text="Auth type to use hmac or token or basic", + validators=[validators.valid_webhook_auth_type], + ) + additional_data_headers = serializers.ListField( + required=False, + allow_null=True, + child=serializers.CharField(), + ) + + class Meta: + model = models.Webhook + fields = [ + "name", + "type", + "hmac_algorithm", + "header_key", + "auth_type", + "hmac_signature_prefix", + "hmac_format", + "owner", + "secret", + "username", + "test_mode", + "additional_data_headers", + "organization_id", + "client_id", + "client_secret", + "introspection_url", + "jwks_url", + "audience", + "public_key", + "ecdsa_prefix_header_key", + "certificate_subject", + ] + + +class WebhookOutSerializer(serializers.ModelSerializer): + owner = serializers.SerializerMethodField() + organization = serializers.SerializerMethodField() + + class Meta: + model = models.Webhook + read_only_fields = [ + "id", + "owner", + "url", + "type", + "created_at", + "modified_at", + "test_content_type", + "test_content", + "test_error_message", + ] + fields = [ + "name", + "test_mode", + "hmac_algorithm", + "header_key", + "hmac_signature_prefix", + "hmac_format", + "auth_type", + "additional_data_headers", + "username", + "organization", + "client_id", + "jwks_url", + "audience", + "introspection_url", + "public_key", + "ecdsa_prefix_header_key", + "certificate_subject", + *read_only_fields, + ] + + def get_owner(self, obj) -> str: + return f"{obj.owner.username}" + + def get_organization(self, obj) -> Optional[OrganizationRefSerializer]: + return ( + OrganizationRefSerializer(obj.organization).data + if obj.organization + else None + ) diff --git a/src/aap_eda/api/urls.py b/src/aap_eda/api/urls.py index 7f80fa23e..5b7134934 100644 --- a/src/aap_eda/api/urls.py +++ b/src/aap_eda/api/urls.py @@ -55,6 +55,12 @@ router.register("decision-environments", views.DecisionEnvironmentViewSet) router.register("organizations", views.OrganizationViewSet) router.register("teams", views.TeamViewSet) +router.register("webhooks", views.WebhookViewSet) +router.register( + "external_webhook", + views.ExternalWebhookViewSet, + basename="external_webhook", +) openapi_urls = [ path( diff --git a/src/aap_eda/api/views/__init__.py b/src/aap_eda/api/views/__init__.py index 0c5edd6c8..fd74ad61a 100644 --- 
a/src/aap_eda/api/views/__init__.py +++ b/src/aap_eda/api/views/__init__.py @@ -18,11 +18,13 @@ from .decision_environment import DecisionEnvironmentViewSet from .eda_credential import EdaCredentialViewSet from .event_stream import EventStreamViewSet +from .external_webhook import ExternalWebhookViewSet from .organization import OrganizationViewSet from .project import ProjectViewSet from .rulebook import AuditRuleViewSet, RulebookViewSet from .team import TeamViewSet from .user import CurrentUserAwxTokenViewSet, CurrentUserView, UserViewSet +from .webhook import WebhookViewSet __all__ = ( # auth @@ -51,4 +53,8 @@ "OrganizationViewSet", # teams "TeamViewSet", + # webhook + "WebhookViewSet", + # External webhook + "ExternalWebhookViewSet", ) diff --git a/src/aap_eda/api/views/external_webhook.py b/src/aap_eda/api/views/external_webhook.py new file mode 100644 index 000000000..dc9bdf796 --- /dev/null +++ b/src/aap_eda/api/views/external_webhook.py @@ -0,0 +1,207 @@ +# Copyright 2024 Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import urllib.parse + +import yaml +from django.conf import settings +from django.http.request import HttpHeaders +from drf_spectacular.utils import extend_schema +from rest_framework import status, viewsets +from rest_framework.decorators import action +from rest_framework.exceptions import AuthenticationFailed, ParseError +from rest_framework.permissions import AllowAny +from rest_framework.response import Response + +from aap_eda.api.webhook_authentication import ( + validate_basic_auth, + validate_ecdsa, + validate_hmac_auth, + validate_jwt_access_token, + validate_mtls_auth, + validate_oauth_access_token, + validate_token_auth, +) +from aap_eda.core.enums import Action, ResourceType, WebhookAuthType +from aap_eda.core.models import Webhook +from aap_eda.services.pg_notify import PGNotify + +logger = logging.getLogger(__name__) + + +class ExternalWebhookViewSet(viewsets.GenericViewSet): + queryset = Webhook.objects.all() + rbac_action = None + rbac_resource_type = ResourceType.WEBHOOK + permission_classes = [AllowAny] + authentication_classes = [] + + def get_rbac_permission(self): + if self.action == "post": + return ResourceType.WEBHOOK, Action.READ + + def _update_test_data( + self, + error_message: str = "", + content_type: str = "", + content: str = "", + ): + logger.warning( + f"The webhook: {self.webhook.name} is currently in test mode" + ) + self.webhook.test_error_message = error_message + self.webhook.test_content_type = content_type + self.webhook.test_content = content + self.webhook.save( + update_fields=[ + "test_content_type", + "test_content", + "test_error_message", + ] + ) + + def _parse_body(self, content_type: str, body: bytes) -> dict: + if content_type == "application/json": + data = yaml.safe_load(body.decode()) + elif content_type == "application/x-www-form-urlencoded": + data = urllib.parse.parse_qs(body.decode()) + else: + try: + data = yaml.safe_load(body.decode()) + except Exception: + message = f"Invalid 
content type passed {content_type}" + logger.error(message) + raise ParseError(message) + return data + + def _embellish_data(self, headers: HttpHeaders, data: dict): + data["eda_webhook_name"] = self.webhook.name + if not self.webhook.additional_data_headers: + return + for key in self.webhook.additional_data_headers: + value = headers.get(key) + if value: + data[key] = value + + def _handle_auth(self, request): + try: + if self.webhook.auth_type == WebhookAuthType.HMAC: + validate_hmac_auth( + self.webhook.secret.get_secret_value().encode("utf-8"), + self.webhook.hmac_format, + self.webhook.hmac_signature_prefix, + request.body, + request.headers[self.webhook.header_key], + self.webhook.hmac_algorithm, + ) + elif self.webhook.auth_type == WebhookAuthType.MTLS: + validate_mtls_auth( + self.webhook.certificate_subject, + request.headers[self.webhook.header_key], + ) + elif self.webhook.auth_type == WebhookAuthType.TOKEN: + validate_token_auth( + self.webhook.secret.get_secret_value(), + request.headers[self.webhook.header_key], + ) + elif self.webhook.auth_type == WebhookAuthType.BASIC: + validate_basic_auth( + self.webhook.username, + self.webhook.secret.get_secret_value(), + request.headers[self.webhook.header_key], + ) + elif self.webhook.auth_type == WebhookAuthType.OAUTH2JWT: + validate_jwt_access_token( + self.webhook.jwks_url, + self.webhook.audience, + request.headers[self.webhook.header_key], + ) + elif self.webhook.auth_type == WebhookAuthType.OAUTH2: + validate_oauth_access_token( + self.webhook.introspection_url, + request.headers[self.webhook.header_key], + self.webhook.client_id, + self.webhook.client_secret.get_secret_value(), + ) + elif self.webhook.auth_type == WebhookAuthType.ECDSA: + if self.webhook.ecdsa_prefix_header_key: + content_prefix = request.headers[ + self.webhook.ecdsa_prefix_header_key + ] + else: + content_prefix = "" + + validate_ecdsa( + request.body, + self.webhook.public_key, + request.headers[self.webhook.header_key], + content_prefix, + ) + else: + message = "Unknown auth type" + logger.error(message) + raise ParseError(message) + except AuthenticationFailed as err: + if self.webhook.test_mode: + self._update_test_data(error_message=err) + raise + + @extend_schema(exclude=True) + @action(detail=True, methods=["POST"], rbac_action=None) + def post(self, request, *args, **kwargs): + try: + self.webhook = Webhook.objects.get(uuid=kwargs["pk"]) + except Webhook.DoesNotExist: + raise ParseError("bad uuid specified") + + logger.debug(f"Headers {request.headers}") + logger.debug(f"Body {request.body}") + if self.webhook.header_key not in request.headers: + message = f"{self.webhook.header_key} header is missing" + logger.error(message) + if self.webhook.test_mode: + self._update_test_data(error_message=message) + raise ParseError(message) + + self._handle_auth(request) + + body = self._parse_body( + request.headers.get("Content-Type", ""), request.body + ) + # Some sites send in an array or a string + if isinstance(body, dict): + data = body + else: + data = {"body": body} + + logger.debug(f"Data: {data}") + self._embellish_data(request.headers, data) + if self.webhook.test_mode: + self._update_test_data( + content=yaml.dump(data), + content_type=request.headers.get("Content-Type", "unknown"), + ) + else: + try: + PGNotify( + settings.PG_NOTIFY_DSN_SERVER, + self.webhook.channel_name, + data, + )() + except Exception as e: + logger.error(e) + return Response(status=status.HTTP_500_INTERNAL_SERVER_ERROR) + + return Response(status=status.HTTP_200_OK) diff 
--git a/src/aap_eda/api/views/webhook.py b/src/aap_eda/api/views/webhook.py new file mode 100644 index 000000000..62c4041fb --- /dev/null +++ b/src/aap_eda/api/views/webhook.py @@ -0,0 +1,198 @@ +# Copyright 2024 Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from ansible_base.rbac.api.related import check_related_permissions +from ansible_base.rbac.models import RoleDefinition +from django.conf import settings +from django.db import transaction +from django.forms import model_to_dict +from django_filters import rest_framework as defaultfilters +from drf_spectacular.utils import OpenApiResponse, extend_schema +from rest_framework import mixins, status, viewsets +from rest_framework.response import Response + +from aap_eda.api import serializers +from aap_eda.api.filters.webhook import WebhookFilter +from aap_eda.core import models +from aap_eda.core.enums import ResourceType, WebhookAuthType + +logger = logging.getLogger(__name__) + + +class WebhookViewSet( + mixins.CreateModelMixin, + viewsets.ReadOnlyModelViewSet, + mixins.DestroyModelMixin, +): + queryset = models.Webhook.objects.order_by("-created_at") + filter_backends = (defaultfilters.DjangoFilterBackend,) + filterset_class = WebhookFilter + rbac_resource_type = ResourceType.WEBHOOK + + def get_serializer_class(self): + if self.action == "list": + return serializers.WebhookOutSerializer + elif self.action == "destroy": + return serializers.WebhookOutSerializer + elif self.action == "create": + return serializers.WebhookInSerializer + elif self.action == "partial_update": + return serializers.WebhookInSerializer + + return serializers.WebhookOutSerializer + + def get_response_serializer_class(self): + return serializers.WebhookOutSerializer + + def filter_queryset(self, queryset): + return super().filter_queryset( + queryset.model.access_qs(self.request.user, queryset=queryset) + ) + + @extend_schema( + description="Get the Webhook by its id", + responses={ + status.HTTP_200_OK: OpenApiResponse( + serializers.WebhookOutSerializer, + description="Return the webhook by its id.", + ), + }, + ) + def retrieve(self, request, pk: int): + webhook = self.get_object() + return Response(serializers.WebhookOutSerializer(webhook).data) + + @extend_schema( + description="Delete a Webhook by its id", + responses={ + status.HTTP_204_NO_CONTENT: OpenApiResponse( + None, description="Delete successful." 
+ ) + }, + ) + def destroy(self, request, *args, **kwargs): + webhook = self.get_object() + self.perform_destroy(webhook) + return Response(status=status.HTTP_204_NO_CONTENT) + + @extend_schema( + description="List all webhooks", + responses={ + status.HTTP_200_OK: OpenApiResponse( + serializers.WebhookOutSerializer(many=True), + description="Return a list of webhook.", + ), + }, + ) + def list(self, request): + webhooks = models.Webhook.objects.all() + webhooks = self.filter_queryset(webhooks) + serializer = serializers.WebhookOutSerializer(webhooks, many=True) + result = self.paginate_queryset(serializer.data) + + return self.get_paginated_response(result) + + @extend_schema( + request=serializers.WebhookInSerializer, + responses={ + status.HTTP_201_CREATED: OpenApiResponse( + serializers.WebhookOutSerializer, + description="Return the new webhook.", + ), + status.HTTP_400_BAD_REQUEST: OpenApiResponse( + description="Invalid data to create webhook." + ), + }, + ) + def create(self, request): + context = {"request": request} + serializer = serializers.WebhookInSerializer( + data=request.data, + context=context, + ) + serializer.is_valid(raise_exception=True) + with transaction.atomic(): + response = serializer.save() + check_related_permissions( + request.user, + serializer.Meta.model, + {}, + model_to_dict(serializer.instance), + ) + RoleDefinition.objects.give_creator_permissions( + request.user, serializer.instance + ) + if response.auth_type == WebhookAuthType.MTLS: + response.url = ( + f"{settings.WEBHOOK_MTLS_URL_PREFIX}/{response.uuid}/post/" + ) + else: + response.url = ( + f"{settings.WEBHOOK_URL_PREFIX}/{response.uuid}/post/" + ) + response.save(update_fields=["url"]) + + return Response( + serializers.WebhookOutSerializer(response).data, + status=status.HTTP_201_CREATED, + ) + + @extend_schema( + request=serializers.WebhookInSerializer, + responses={ + status.HTTP_200_OK: OpenApiResponse( + serializers.WebhookOutSerializer, + description="Update successful, return the new webhook.", + ), + status.HTTP_400_BAD_REQUEST: OpenApiResponse( + description="Unable to update webhook." + ), + }, + ) + def partial_update(self, request, *args, **kwargs): + webhook = self.get_object() + old_data = model_to_dict(webhook) + context = {"request": request} + serializer = serializers.WebhookInSerializer( + webhook, + data=request.data, + context=context, + partial=True, + ) + serializer.is_valid(raise_exception=True) + if webhook.test_mode != request.data.get( + "test_mode", webhook.test_mode + ): + webhook.test_content_type = "" + webhook.test_content = "" + webhook.test_error_message = "" + + for key, value in serializer.validated_data.items(): + setattr(webhook, key, value) + + with transaction.atomic(): + webhook.save() + check_related_permissions( + request.user, + serializer.Meta.model, + old_data, + model_to_dict(webhook), + ) + + return Response( + serializers.WebhookOutSerializer(webhook).data, + status=status.HTTP_200_OK, + ) diff --git a/src/aap_eda/api/webhook_authentication.py b/src/aap_eda/api/webhook_authentication.py new file mode 100644 index 000000000..3397e3013 --- /dev/null +++ b/src/aap_eda/api/webhook_authentication.py @@ -0,0 +1,178 @@ +# Copyright 2024 Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import base64 +import hmac +import logging +from hashlib import sha256 +from urllib.parse import urljoin + +import ecdsa +import jwt +import requests +from ecdsa.util import sigdecode_der +from rest_framework.exceptions import AuthenticationFailed + +from aap_eda.core.enums import HMACFormatType + +logger = logging.getLogger(__name__) + + +def validate_hmac_auth( + secret: str, + fmt: str, + prefix: str, + body: bytes, + signature: str, + algorithm: str, +): + hash_object = hmac.new( + secret, + msg=body, + digestmod=algorithm or "sha256", + ) + if not fmt: + fmt = HMACFormatType.BASE64 + + if fmt == HMACFormatType.HEX: + expected_signature = prefix + hash_object.hexdigest() + elif fmt == HMACFormatType.BASE64: + expected_signature = ( + prefix + base64.b64encode(hash_object.digest()).decode() + ) + + if not hmac.compare_digest(expected_signature, signature): + message = "Signature mismatch, check your payload and secret" + logger.warning(message) + raise AuthenticationFailed(message) + + +def validate_token_auth(secret: str, token: str): + if secret != _token_sans_bearer(token): + message = "Token mismatch, check your token" + logger.warning(message) + raise AuthenticationFailed(message) + + +def validate_mtls_auth(subject: str, cert_subject: str): + if subject and subject != cert_subject: + message = f"Subject Name mismatch : {cert_subject}" + logger.warning(message) + raise AuthenticationFailed(message) + + +def validate_basic_auth(username: str, password: str, auth_str: str): + if auth_str.startswith("Basic"): + auth_str = auth_str.split("Basic ")[1] + user_pass = f"{username}:{password}" + b64_value = base64.b64encode(user_pass.encode()).decode() + if auth_str != b64_value: + message = "Credential mismatch" + logger.warning(message) + raise AuthenticationFailed(message) + + +def validate_jwt_access_token(jwks_url: str, audience: str, access_token: str): + if not jwt.algorithms.has_crypto: + raise Exception( + "No crypto support for JWT, " + "please install the cryptography dependency" + ) + try: + access_token = _token_sans_bearer(access_token) + if not bool(jwks_url): + # Not safe to do this + url = jwt.decode( + access_token, options={"verify_signature": False} + )["iss"] + jwks_url = urljoin(url, ".well-known/jwks.json") + logger.error(f"JWKS URL is {jwks_url}") + + jwks_client = jwt.PyJWKClient( + jwks_url, cache_jwk_set=True, lifespan=360 + ) + options = { + "verify_signature": True, + "verify_exp": True, + "verify_nbf": True, + "verify_iat": True, + "verify_aud": False, + } + + if bool(audience): + options["verify_aud"] = True + + signing_key = jwks_client.get_signing_key_from_jwt(access_token) + data = jwt.decode( + access_token, + signing_key.key, + algorithms=["RS256"], + audience=audience, + options=options, + ) + return data + except jwt.exceptions.PyJWTError as err: + message = f"JWT Error {err}" + logger.warning(message) + raise AuthenticationFailed(message) + + +def validate_oauth_access_token( + introspection_url: str, token: str, client_id: str, client_secret: str +): + data = { + "token": _token_sans_bearer(token), + "token_type_hint": "access_token", + } + 
auth = (client_id, client_secret) + # For Keycloak the introspection payload must be form-encoded (www-url-encoded), not JSON + response = requests.post(introspection_url, data=data, auth=auth) + if response.status_code != 200: + message = f"Introspection Error {response.status_code} {response.text}" + logger.warning(message) + raise AuthenticationFailed(message) + + response_data = response.json() + if not response_data.get("active", False): + message = "User is not active" + logger.warning(message) + raise AuthenticationFailed(message) + + +def validate_ecdsa( + message: bytes, public_key_pem: str, signature: str, content_prefix: str +): + logger.debug(f"Public Key {public_key_pem}") + logger.debug(f"Signature {signature}") + logger.debug(f"Content Prefix {content_prefix}") + public_key = ecdsa.VerifyingKey.from_pem(public_key_pem.encode()) + signature = base64.b64decode(signature) + + message_bytes = bytearray() + if content_prefix: + message_bytes.extend(content_prefix.encode()) + + message_bytes.extend(message) + # VerifyingKey.verify raises BadSignatureError rather than returning False + try: + public_key.verify( + signature, bytes(message_bytes), sha256, sigdecode=sigdecode_der + ) + except ecdsa.BadSignatureError: + error = "ECDSA signature does not match" + logger.warning(error) + raise AuthenticationFailed(error) + + +def _token_sans_bearer(token: str) -> str: + if token.startswith("Bearer"): + return token.split("Bearer ")[1] + return token diff --git a/src/aap_eda/core/enums.py b/src/aap_eda/core/enums.py index 083804413..113666cb1 100644 --- a/src/aap_eda/core/enums.py +++ b/src/aap_eda/core/enums.py @@ -53,6 +53,7 @@ class ResourceType(DjangoStrEnum): EVENT_STREAM = "event_stream" ORGANIZATION = "organization" TEAM = "team" + WEBHOOK = "webhook" class Action(DjangoStrEnum): @@ -138,3 +139,22 @@ class RulebookProcessLogLevel(DjangoStrEnum): DEBUG = "debug" INFO = "info" ERROR = "error" + + +class WebhookAuthType(DjangoStrEnum): + """Types of authentication for Webhook.""" + + HMAC = "hmac" + TOKEN = "token" + BASIC = "basic" + OAUTH2 = "oauth2" + OAUTH2JWT = "oauth2-jwt" + ECDSA = "ecdsa" + MTLS = "mtls" + + +class HMACFormatType(DjangoStrEnum): + """Types of format for HMAC.""" + + BASE64 = "base64" + HEX = "hex" diff --git a/src/aap_eda/core/management/commands/create_initial_data.py b/src/aap_eda/core/management/commands/create_initial_data.py index d95446876..fef8c34e1 100644 --- a/src/aap_eda/core/management/commands/create_initial_data.py +++ b/src/aap_eda/core/management/commands/create_initial_data.py @@ -52,6 +52,7 @@ "decision_environment": CRUD, "credential_type": CRUD, "eda_credential": CRUD, + "webhook": CRUD, }, }, { @@ -75,6 +76,7 @@ "decision_environment": CRUD, "credential_type": ["add", "view"], "eda_credential": ["add", "view"], + "webhook": ["add", "view"], }, }, { @@ -101,6 +103,7 @@ "decision_environment": CRUD, "credential_type": ["add", "view"], "eda_credential": ["add", "view"], + "webhook": ["add", "view"], }, }, { @@ -119,6 +122,7 @@ "decision_environment": ["view"], "credential_type": ["view"], "eda_credential": ["view"], + "webhook": ["view"], }, }, { @@ -135,6 +139,7 @@ "decision_environment": ["view"], "credential_type": ["view"], "eda_credential": ["view"], + "webhook": ["view"], }, }, { @@ -151,6 +156,7 @@ "decision_environment": ["view"], "credential_type": ["view"], "eda_credential": ["view"], + "webhook": ["view"], }, }, ] diff --git a/src/aap_eda/core/migrations/0041_webhook_activation_webhooks_webhook_ix_webhook_id_and_more.py b/src/aap_eda/core/migrations/0041_webhook_activation_webhooks_webhook_ix_webhook_id_and_more.py new file mode 100644 index 
000000000..97db6da56 --- /dev/null +++ b/src/aap_eda/core/migrations/0041_webhook_activation_webhooks_webhook_ix_webhook_id_and_more.py @@ -0,0 +1,263 @@ +# Generated by Django 4.2.7 on 2024-06-06 01:08 + +import uuid + +import django.contrib.postgres.fields +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + +import aap_eda.core.enums +import aap_eda.core.models.utils +import aap_eda.core.utils.crypto.fields + + +class Migration(migrations.Migration): + dependencies = [ + ("core", "0040_remove_rulebookprocesslog_line_number"), + ] + + operations = [ + migrations.CreateModel( + name="Webhook", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "name", + models.TextField( + help_text="The name of the webhook", unique=True + ), + ), + ( + "type", + models.TextField( + default="generic", help_text="The type of the webhook" + ), + ), + ( + "secret", + aap_eda.core.utils.crypto.fields.EncryptedTextField( + help_text="The secret for the webhook", null=True + ), + ), + ( + "auth_type", + models.TextField( + choices=[ + ("hmac", "hmac"), + ("token", "token"), + ("basic", "basic"), + ("oauth2", "oauth2"), + ("oauth2-jwt", "oauth2-jwt"), + ("ecdsa", "ecdsa"), + ("mtls", "mtls"), + ], + default=aap_eda.core.enums.WebhookAuthType["HMAC"], + help_text="Authentication method basic, hmac, token, oAuth2, mTLS", + ), + ), + ( + "header_key", + models.TextField( + blank=True, + help_text="The HTTP header which will contain the HMAC Signature or Token", + ), + ), + ( + "hmac_algorithm", + models.TextField( + blank=True, + help_text="The algorithm to use for HMAC verification", + ), + ), + ( + "hmac_format", + models.TextField( + blank=True, + choices=[("base64", "base64"), ("hex", "hex")], + help_text="The format of the signature can be base64 or hex", + ), + ), + ( + "hmac_signature_prefix", + models.TextField( + blank=True, help_text="The prefix in the signature" + ), + ), + ( + "additional_data_headers", + django.contrib.postgres.fields.ArrayField( + base_field=models.TextField( + help_text="The HTTP header" + ), + blank=True, + help_text="The additional http headers which will be added to the event data", + null=True, + size=None, + ), + ), + ( + "test_mode", + models.BooleanField( + default=False, help_text="Enable test mode" + ), + ), + ( + "test_content_type", + models.TextField( + default="", + help_text="The content type of test data, when in test mode", + null=True, + ), + ), + ( + "test_content", + models.TextField( + default="", + help_text="The content received, when in test mode, stored as a yaml string", + null=True, + ), + ), + ( + "test_error_message", + models.TextField( + default="", + help_text="The error message, when in test mode", + null=True, + ), + ), + ( + "username", + models.TextField( + blank=True, + default="", + help_text="The username for basic auth", + null=True, + ), + ), + ("uuid", models.UUIDField(default=uuid.uuid4)), + ( + "url", + models.TextField( + help_text="The URL which will be used to post the data to the webhook" + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("modified_at", models.DateTimeField(auto_now=True)), + ( + "client_secret", + aap_eda.core.utils.crypto.fields.EncryptedTextField( + help_text="The client secret OAUTH2", null=True + ), + ), + ( + "client_id", + models.TextField( + blank=True, + default="", + help_text="The client id for OAUTH2", + null=True, + ), + ), + ( 
"introspection_url", + models.TextField( + blank=True, + default="", + help_text="The introspection URL for OAUTH2", + null=True, + ), + ), + ( + "jwks_url", + models.TextField( + blank=True, + default="", + help_text="The JWKS URL for OAUTH2 with JWT", + null=True, + ), + ), + ( + "audience", + models.TextField( + blank=True, + default="", + help_text="The audience for JWT", + null=True, + ), + ), + ( + "public_key", + models.TextField( + blank=True, + default="", + help_text="The Public key used with ECDSA", + null=True, + ), + ), + ( + "ecdsa_prefix_header_key", + models.TextField( + blank=True, + default="", + help_text="The Header Key for ECDSA with optional prefix content", + null=True, + ), + ), + ( + "certificate_subject", + models.TextField( + blank=True, + default="", + help_text="The subject name from certificate", + null=True, + ), + ), + ( + "organization", + models.ForeignKey( + default=aap_eda.core.models.utils.get_default_organization_id, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="core.organization", + ), + ), + ( + "owner", + models.ForeignKey( + help_text="The user who created the webhook", + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), + ], + options={ + "db_table": "core_webhook", + }, + ), + migrations.AddField( + model_name="activation", + name="webhooks", + field=models.ManyToManyField(default=None, to="core.webhook"), + ), + migrations.AddIndex( + model_name="webhook", + index=models.Index(fields=["id"], name="ix_webhook_id"), + ), + migrations.AddIndex( + model_name="webhook", + index=models.Index(fields=["name"], name="ix_webhook_name"), + ), + migrations.AddIndex( + model_name="webhook", + index=models.Index(fields=["uuid"], name="ix_webhook_uuid"), + ), + ] diff --git a/src/aap_eda/core/models/__init__.py b/src/aap_eda/core/models/__init__.py index 38b6232a7..59add376e 100644 --- a/src/aap_eda/core/models/__init__.py +++ b/src/aap_eda/core/models/__init__.py @@ -45,6 +45,7 @@ ) from .team import Team from .user import AwxToken, User +from .webhook import Webhook __all__ = [ "ActivationInstanceJobInstance", @@ -73,6 +74,7 @@ "EventStream", "Organization", "Team", + "Webhook", ] permission_registry.register( @@ -83,6 +85,7 @@ Project, Organization, Team, + Webhook, parent_field_name="organization", ) permission_registry.register( diff --git a/src/aap_eda/core/models/activation.py b/src/aap_eda/core/models/activation.py index 83007b0b9..bbf52a577 100644 --- a/src/aap_eda/core/models/activation.py +++ b/src/aap_eda/core/models/activation.py @@ -25,6 +25,7 @@ from .base import BaseOrgModel, UniqueNamedModel from .mixins import StatusHandlerModelMixin from .user import AwxToken, User +from .webhook import Webhook __all__ = ("Activation",) @@ -127,3 +128,4 @@ class Meta: blank=True, help_text="Name of the kubernetes service", ) + webhooks = models.ManyToManyField(Webhook, default=None) diff --git a/src/aap_eda/core/models/webhook.py b/src/aap_eda/core/models/webhook.py new file mode 100644 index 000000000..1cc3d04bf --- /dev/null +++ b/src/aap_eda/core/models/webhook.py @@ -0,0 +1,179 @@ +# Copyright 2024 Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import uuid + +from django.contrib.postgres.fields import ArrayField +from django.db import models + +from aap_eda.core.enums import HMACFormatType, WebhookAuthType +from aap_eda.core.utils.crypto.fields import EncryptedTextField + +from .base import BaseOrgModel + +__all__ = "Webhook" + +EDA_WEBHOOK_CHANNEL_PREFIX = "eda_webhook_" + + +class Webhook(BaseOrgModel): + class Meta: + db_table = "core_webhook" + indexes = [ + models.Index(fields=["id"], name="ix_webhook_id"), + models.Index(fields=["name"], name="ix_webhook_name"), + models.Index(fields=["uuid"], name="ix_webhook_uuid"), + ] + + name = models.TextField( + null=False, unique=True, help_text="The name of the webhook" + ) + type = models.TextField( + null=False, help_text="The type of the webhook", default="generic" + ) + secret = EncryptedTextField( + null=True, help_text="The secret for the webhook" + ) + auth_type = models.TextField( + null=False, + choices=WebhookAuthType.choices(), + default=WebhookAuthType.HMAC, + help_text="Authentication method basic, hmac, token, oAuth2, mTLS", + ) + header_key = models.TextField( + blank=True, + help_text=( + "The HTTP header which will contain the HMAC " "Signature or Token" + ), + ) + hmac_algorithm = models.TextField( + blank=True, + help_text="The algorithm to use for HMAC verification", + ) + hmac_format = models.TextField( + blank=True, + choices=HMACFormatType.choices(), + help_text="The format of the signature can be base64 or hex", + ) + hmac_signature_prefix = models.TextField( + blank=True, + help_text="The prefix in the signature", + ) + + additional_data_headers = ArrayField( + models.TextField(help_text="The HTTP header"), + null=True, + blank=True, + help_text=( + "The additional http headers which will " + "be added to the event data" + ), + ) + + test_mode = models.BooleanField( + default=False, help_text="Enable test mode" + ) + + test_content_type = models.TextField( + null=True, + default="", + help_text="The content type of test data, when in test mode", + ) + test_content = models.TextField( + null=True, + default="", + help_text=( + "The content recieved, when in test mode, " + "stored as a yaml string" + ), + ) + test_error_message = models.TextField( + null=True, + default="", + help_text="The error message, when in test mode", + ) + + owner = models.ForeignKey( + "User", + on_delete=models.CASCADE, + null=False, + help_text="The user who created the webhook", + ) + username = models.TextField( + null=True, + blank=True, + default="", + help_text="The username for basic auth", + ) + uuid = models.UUIDField(default=uuid.uuid4) + url = models.TextField( + null=False, + help_text="The URL which will be used to post the data to the webhook", + ) + created_at = models.DateTimeField(auto_now_add=True, null=False) + modified_at = models.DateTimeField(auto_now=True, null=False) + client_secret = EncryptedTextField( + null=True, help_text="The client secret OAUTH2" + ) + client_id = models.TextField( + null=True, + blank=True, + default="", + help_text="The client id for OAUTH2", + ) + introspection_url = models.TextField( + null=True, + blank=True, + 
default="", + help_text="The introspection URL for OAUTH2", + ) + jwks_url = models.TextField( + null=True, + blank=True, + default="", + help_text="The JWKS URL for OAUTH2 with JWT", + ) + audience = models.TextField( + null=True, + blank=True, + default="", + help_text="The audience for JWT", + ) + public_key = models.TextField( + null=True, + blank=True, + default="", + help_text="The Public key used with ECDSA", + ) + ecdsa_prefix_header_key = models.TextField( + null=True, + blank=True, + default="", + help_text="The Header Key for ECDSA with optional prefix content", + ) + certificate_subject = models.TextField( + null=True, + blank=True, + default="", + help_text="The subject name from certificate", + ) + + def _get_channel_name(self) -> str: + """Generate the channel name based on the UUID and prefix.""" + return ( + f"{EDA_WEBHOOK_CHANNEL_PREFIX}" + f"{str(self.uuid).replace('-','_')}" + ) + + channel_name = property(_get_channel_name) diff --git a/src/aap_eda/core/validators.py b/src/aap_eda/core/validators.py index 2a60dbe43..91cc029b9 100644 --- a/src/aap_eda/core/validators.py +++ b/src/aap_eda/core/validators.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import hashlib import logging import typing as tp @@ -235,3 +236,46 @@ def check_credential_types( raise serializers.ValidationError( f"The type of credential can only be one of {names}" ) + + +def valid_hash_algorithm(algo: str): + if algo not in hashlib.algorithms_available: + raise serializers.ValidationError( + ( + f"Invalid hash algorithm {algo} should " + f"be one of {hashlib.algorithms_available}" + ) + ) + + +def valid_hash_format(fmt: str): + if fmt not in enums.HMACFormatType.values(): + raise serializers.ValidationError( + ( + f"Invalid hash format {fmt} should " + f"be one of {enums.HMACFormatType.values()}" + ) + ) + return fmt + + +def valid_webhook_auth_type(auth_type: str): + if auth_type not in enums.WebhookAuthType.values(): + raise serializers.ValidationError( + ( + f"Invalid auth_type {auth_type} should " + f"be one of {enums.WebhookAuthType.values()}" + ) + ) + return auth_type + + +def check_if_webhooks_exists(webhook_ids: list[int]) -> list[int]: + for webhook_id in webhook_ids: + try: + models.Webhook.objects.get(pk=webhook_id) + except models.Webhook.DoesNotExist: + raise serializers.ValidationError( + f"Webhook with id {webhook_id} does not exist" + ) + return webhook_ids diff --git a/src/aap_eda/services/pg_notify.py b/src/aap_eda/services/pg_notify.py new file mode 100644 index 000000000..b00c62e9d --- /dev/null +++ b/src/aap_eda/services/pg_notify.py @@ -0,0 +1,100 @@ +# Copyright 2024 Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import json +import logging +import uuid + +import psycopg +import xxhash +from django.conf import settings + +logger = logging.getLogger(__name__) + +MAX_MESSAGE_LENGTH = settings.MAX_PG_NOTIFY_MESSAGE_SIZE +MESSAGE_CHUNKED_UUID = "_message_chunked_uuid" +MESSAGE_CHUNK_COUNT = "_message_chunk_count" +MESSAGE_CHUNK_SEQUENCE = "_message_chunk_sequence" +MESSAGE_CHUNK = "_chunk" +MESSAGE_LENGTH = "_message_length" +MESSAGE_XX_HASH = "_message_xx_hash" + + +class PGNotify: + """The PGNotify action sends an event to a PG Pub Sub Channel. + + Needs + dsn https://www.postgresql.org/docs/current/libpq-connect.html + #LIBPQ-CONNSTRING-KEYWORD-VALUE + channel the channel name to send the notifies + event + """ + + def __init__(self, dsn: str, channel: str, data: dict): + self.dsn = dsn + self.channel = channel + self.data = data + + def __call__(self): + try: + with psycopg.connect( + conninfo=self.dsn, + autocommit=True, + ) as conn: + with conn.cursor() as cursor: + payload = json.dumps(self.data) + message_length = len(payload) + logger.debug(f"Message length {message_length}") + if message_length >= MAX_MESSAGE_LENGTH: + xx_hash = xxhash.xxh32( + payload.encode("utf-8") + ).hexdigest() + logger.debug("Message length exceeds, will chunk") + message_uuid = str(uuid.uuid4()) + number_of_chunks = ( + int(message_length / MAX_MESSAGE_LENGTH) + 1 + ) + chunked = { + MESSAGE_CHUNKED_UUID: message_uuid, + MESSAGE_CHUNK_COUNT: number_of_chunks, + MESSAGE_LENGTH: message_length, + MESSAGE_XX_HASH: xx_hash, + } + logger.debug(f"Chunk info {message_uuid}") + logger.debug(f"Number of chunks {number_of_chunks}") + logger.debug(f"Total data size {message_length}") + logger.debug(f"XX Hash {xx_hash}") + + sequence = 1 + for i in range(0, message_length, MAX_MESSAGE_LENGTH): + chunked[MESSAGE_CHUNK] = payload[ + i : i + MAX_MESSAGE_LENGTH + ] + chunked[MESSAGE_CHUNK_SEQUENCE] = sequence + sequence += 1 + + logger.debug( + f"Chunked Length {len(json.dumps(chunked))}" + ) + cursor.execute( + f"NOTIFY {self.channel}, " + f"$${json.dumps(chunked)}$$;" + ) + else: + cursor.execute( + f"NOTIFY {self.channel}, " f"$${payload}$$;" + ) + except psycopg.OperationalError as e: + logger.error(f"PG Notify operational error {e}") + raise e diff --git a/src/aap_eda/settings/default.py b/src/aap_eda/settings/default.py index 2200511a7..361818ad8 100644 --- a/src/aap_eda/settings/default.py +++ b/src/aap_eda/settings/default.py @@ -647,3 +647,27 @@ def get_rulebook_process_log_level() -> RulebookProcessLogLevel: "SAFE_PLUGINS_FOR_PORT_FORWARD", ["ansible.eda.webhook", "ansible.eda.alertmanager"], ) + +_DEFAULT_PG_NOTIFY_DSN_SERVER = ( + f"host={DATABASES['default']['HOST']} " + f"port={DATABASES['default']['PORT']} " + f"dbname={DATABASES['default']['NAME']} " + f"user={DATABASES['default']['USER']} " + f"password={DATABASES['default']['PASSWORD']}" +) +PG_NOTIFY_DSN_SERVER = settings.get( + "PG_NOTIFY_DSN_SERVER", _DEFAULT_PG_NOTIFY_DSN_SERVER +) +SERVER_UUID = settings.get("SERVER_UUID", "abc-def-123-34567") +WEBHOOK_URL_PREFIX = settings.get( + "WEBHOOK_URL_PREFIX", + f"https://ui.eda.local:8443/{SERVER_UUID}/api/eda/v1/external_webhook", +) +WEBHOOK_MTLS_URL_PREFIX = settings.get( + "WEBHOOK_MTLS_URL_PREFIX", + f"https://ui.eda.local:8443/mtls/{SERVER_UUID}/api/eda/v1/" + "external_webhook", +) +MAX_PG_NOTIFY_MESSAGE_SIZE = int( + settings.get("MAX_PG_NOTIFY_MESSAGE_SIZE", 6144) +) diff --git a/tests/integration/api/test_activation_with_webhook.py b/tests/integration/api/test_activation_with_webhook.py new file mode 
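PGNotify is a callable: given a libpq DSN, a channel name, and a dict, it serializes the dict to JSON and emits it as a NOTIFY on the channel, splitting the payload into chunks carrying the metadata keys above whenever it exceeds MAX_PG_NOTIFY_MESSAGE_SIZE. A usage sketch, assuming a configured Django environment and an already-created webhook named test-webhook-1 (both illustrative):

from django.conf import settings

from aap_eda.core.models import Webhook
from aap_eda.services.pg_notify import PGNotify

webhook = Webhook.objects.get(name="test-webhook-1")  # hypothetical existing webhook
event = {"a": 1, "b": 2, "eda_webhook_name": webhook.name}

# Sends NOTIFY on the webhook's channel; oversized payloads are chunked automatically.
PGNotify(settings.PG_NOTIFY_DSN_SERVER, webhook.channel_name, event)()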
100644 index 000000000..a0024a154 --- /dev/null +++ b/tests/integration/api/test_activation_with_webhook.py @@ -0,0 +1,227 @@ +# Copyright 2024 Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import uuid + +import pytest +import yaml +from rest_framework import status +from rest_framework.test import APIClient + +from aap_eda.core import models +from aap_eda.core.enums import ( + ACTIVATION_STATUS_MESSAGE_MAP, + ActivationStatus, + RestartPolicy, +) +from tests.integration.constants import api_url_v1 + +TEST_EXTRA_VAR = """ +--- +collections: + - community.general + - benthomasson.eda +""" + +TEST_ACTIVATION = { + "name": "test-activation", + "description": "test activation", + "is_enabled": True, + "decision_environment_id": 1, + "project_id": 1, + "rulebook_id": 1, + "extra_var": TEST_EXTRA_VAR, + "restart_policy": RestartPolicy.ON_FAILURE, + "restart_count": 0, + "status_message": "", +} + +TEST_AWX_TOKEN = { + "name": "test-awx-token", + "description": "test AWX token", + "token": "abc123xyx", +} + +PROJECT_GIT_HASH = "684f62df18ce5f8d5c428e53203b9b975426eed0" + +TEST_PROJECT = { + "git_hash": PROJECT_GIT_HASH, + "name": "test-project-01", + "url": "https://git.example.com/acme/project-01", + "description": "test project", +} + +TEST_RULEBOOK = { + "name": "test-rulebook.yml", + "description": "test rulebook", +} + +TEST_DECISION_ENV = { + "name": "test-de", + "description": "test de", + "image_url": "quay.io/ansible/ansible-rulebook", +} + +TEST_RULESETS = """ +--- +- name: hello + hosts: localhost + gather_facts: false + sources: + - name: demo + ansible.eda.range: + limit: 10 + rules: + - name: rule1 + condition: true + action: + debug: +""" + + +def create_activation_related_data(with_project=True): + user = models.User.objects.create_user( + username="luke.skywalker", + first_name="Luke", + last_name="Skywalker", + email="luke.skywalker@example.com", + password="secret", + ) + user_id = user.pk + models.AwxToken.objects.create( + name=TEST_AWX_TOKEN["name"], + token=TEST_AWX_TOKEN["token"], + user=user, + ) + credential_id = models.Credential.objects.create( + name="test-credential", + description="test credential", + credential_type="Container Registry", + username="dummy-user", + secret="dummy-password", + ).pk + decision_environment_id = models.DecisionEnvironment.objects.create( + name=TEST_DECISION_ENV["name"], + image_url=TEST_DECISION_ENV["image_url"], + description=TEST_DECISION_ENV["description"], + credential_id=credential_id, + ).pk + project_id = ( + models.Project.objects.create( + git_hash=TEST_PROJECT["git_hash"], + name=TEST_PROJECT["name"], + url=TEST_PROJECT["url"], + description=TEST_PROJECT["description"], + ).pk + if with_project + else None + ) + rulebook_id = ( + models.Rulebook.objects.create( + name=TEST_RULEBOOK["name"], + rulesets=TEST_RULESETS, + description=TEST_RULEBOOK["description"], + project_id=project_id, + ).pk + if with_project + else None + ) + + webhook1 = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook1", 
+ owner=user, + ) + webhook2 = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook2", + owner=user, + ) + return { + "user_id": user_id, + "decision_environment_id": decision_environment_id, + "project_id": project_id, + "rulebook_id": rulebook_id, + "extra_var": TEST_EXTRA_VAR, + "credential_id": credential_id, + "webhooks": [webhook1, webhook2], + } + + +def create_activation(fks: dict): + activation_data = TEST_ACTIVATION.copy() + activation_data["decision_environment_id"] = fks["decision_environment_id"] + activation_data["project_id"] = fks["project_id"] + activation_data["rulebook_id"] = fks["rulebook_id"] + activation_data["user_id"] = fks["user_id"] + activation = models.Activation(**activation_data) + activation.save() + for webhook in fks["webhooks"]: + activation.webhooks.add(webhook) + + return activation + + +@pytest.mark.django_db +def test_create_activation_with_webhooks( + admin_client: APIClient, preseed_credential_types +): + fks = create_activation_related_data() + test_activation = TEST_ACTIVATION.copy() + test_activation["decision_environment_id"] = fks["decision_environment_id"] + test_activation["rulebook_id"] = fks["rulebook_id"] + test_activation["webhooks"] = [webhook.id for webhook in fks["webhooks"]] + + admin_client.post( + f"{api_url_v1}/users/me/awx-tokens/", data=TEST_AWX_TOKEN + ) + response = admin_client.post( + f"{api_url_v1}/activations/", data=test_activation + ) + assert response.status_code == status.HTTP_201_CREATED + data = response.data + activation = models.Activation.objects.filter(id=data["id"]).first() + assert activation.rulebook_name == TEST_RULEBOOK["name"] + swapped_ruleset = yaml.safe_load(activation.rulebook_rulesets) + assert list(swapped_ruleset[0]["sources"][0].keys()) == [ + "ansible.eda.pg_listener", + "name", + ] + assert activation.status == ActivationStatus.PENDING + assert ( + activation.status_message + == ACTIVATION_STATUS_MESSAGE_MAP[activation.status] + ) + assert data["webhooks"][0]["name"] == "test-webhook1" + assert data["webhooks"][1]["name"] == "test-webhook2" + + +@pytest.mark.django_db +def test_create_activation_with_bad_webhook(admin_client: APIClient): + fks = create_activation_related_data() + test_activation = TEST_ACTIVATION.copy() + test_activation["decision_environment_id"] = fks["decision_environment_id"] + test_activation["rulebook_id"] = fks["rulebook_id"] + test_activation["webhooks"] = [1492] + + admin_client.post( + f"{api_url_v1}/users/me/awx-tokens/", data=TEST_AWX_TOKEN + ) + response = admin_client.post( + f"{api_url_v1}/activations/", data=test_activation + ) + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert ( + str(response.data["webhooks"][0]) + == "Webhook with id 1492 does not exist" + ) diff --git a/tests/integration/api/test_webhook.py b/tests/integration/api/test_webhook.py new file mode 100644 index 000000000..f8df119b2 --- /dev/null +++ b/tests/integration/api/test_webhook.py @@ -0,0 +1,554 @@ +# Copyright 2024 Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +import base64 +import hashlib +import hmac +import secrets +import uuid +from typing import List +from urllib.parse import urlencode + +import pytest +import yaml +from rest_framework import status +from rest_framework.renderers import JSONRenderer +from rest_framework.test import APIClient + +from aap_eda.core import models +from tests.integration.constants import api_url_v1 + + +@pytest.mark.django_db +def test_list_webhooks( + admin_client: APIClient, + default_webhooks: List[models.Webhook], + default_vault_credential, +): + response = admin_client.get(f"{api_url_v1}/webhooks/") + assert response.status_code == status.HTTP_200_OK + assert len(response.data["results"]) == 2 + assert response.data["results"][0]["name"] == default_webhooks[0].name + assert response.data["results"][0]["owner"] == "luke.skywalker" + + +@pytest.mark.django_db +def test_retrieve_webhook( + admin_client: APIClient, + default_webhooks: List[models.Webhook], + default_vault_credential, +): + webhook = default_webhooks[0] + response = admin_client.get(f"{api_url_v1}/webhooks/{webhook.id}/") + assert response.status_code == status.HTTP_200_OK + assert response.data["name"] == default_webhooks[0].name + assert response.data["url"] == default_webhooks[0].url + assert response.data["owner"] == "luke.skywalker" + + +@pytest.mark.django_db +def test_create_webhook( + admin_client: APIClient, +): + data_in = { + "name": "test_webhook", + "header_key": "xyz", + "hmac_signature_prefix": "xyz=", + "secret": "some secret value", + "auth_type": "hmac", + } + response = admin_client.post(f"{api_url_v1}/webhooks/", data=data_in) + assert response.status_code == status.HTTP_201_CREATED + result = response.data + assert result["name"] == "test_webhook" + assert result["owner"] == "test.admin" + assert result["header_key"] == "xyz" + assert result["hmac_signature_prefix"] == "xyz=" + + +@pytest.mark.django_db +def test_post_webhook(admin_client: APIClient): + secret = secrets.token_hex(32) + signature_header_name = "my-secret-header" + data_in = { + "name": "test-webhook-1", + "secret": secret, + "header_key": signature_header_name, + "auth_type": "hmac", + "hmac_format": "hex", + } + + response = admin_client.post(f"{api_url_v1}/webhooks/", data=data_in) + assert response.status_code == status.HTTP_201_CREATED + webhook = models.Webhook.objects.get(id=response.data["id"]) + + data = {"a": 1, "b": 2} + data_bytes = JSONRenderer().render(data) + hash_object = hmac.new( + secret.encode("utf-8"), msg=data_bytes, digestmod=hashlib.sha256 + ) + headers = {signature_header_name: hash_object.hexdigest()} + response = admin_client.post( + f"{api_url_v1}/external_webhook/{webhook.uuid}/post/", + headers=headers, + data=data, + ) + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.django_db +def test_post_webhook_bad_secret( + admin_client: APIClient, + default_user: models.User, +): + secret = secrets.token_hex(32) + signature_header_name = "my-secret-header" + webhook = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + owner=default_user, + url="http://www.example.com", + secret=secret, + header_key=signature_header_name, + test_mode=True, + ) + bad_secret = secrets.token_hex(32) + data = {"a": 1, "b": 2} + data_bytes = JSONRenderer().render(data) + hash_object = hmac.new( + bad_secret.encode("utf-8"), msg=data_bytes, digestmod=hashlib.sha256 + ) + headers = {signature_header_name: 
hash_object.hexdigest()} + response = admin_client.post( + f"{api_url_v1}/external_webhook/{webhook.uuid}/post/", + headers=headers, + data=data, + ) + assert response.status_code == status.HTTP_403_FORBIDDEN + webhook.refresh_from_db() + assert ( + webhook.test_error_message + == "Signature mismatch, check your payload and secret" + ) + + +@pytest.mark.django_db +def test_post_webhook_with_prefix( + base_client: APIClient, + default_user: models.User, +): + secret = secrets.token_hex(32) + signature_header_name = "my-secret-header" + hmac_signature_prefix = "sha256=" + webhook = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + owner=default_user, + url="http://www.example.com", + secret=secret, + header_key=signature_header_name, + hmac_signature_prefix=hmac_signature_prefix, + auth_type="hmac", + hmac_format="hex", + ) + data = {"a": 1, "b": 2} + data_bytes = JSONRenderer().render(data) + hash_object = hmac.new( + secret.encode("utf-8"), msg=data_bytes, digestmod=hashlib.sha256 + ) + headers = { + signature_header_name: f"{hmac_signature_prefix}" + f"{hash_object.hexdigest()}" + } + response = base_client.post( + f"{api_url_v1}/external_webhook/{webhook.uuid}/post/", + headers=headers, + data=data, + ) + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.django_db +def test_post_webhook_with_test_mode( + base_client: APIClient, + default_user: models.User, +): + secret = secrets.token_hex(32) + signature_header_name = "my-secret-header" + hmac_signature_prefix = "sha256=" + webhook = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + owner=default_user, + url="http://www.example.com", + secret=secret, + header_key=signature_header_name, + hmac_signature_prefix=hmac_signature_prefix, + test_mode=True, + auth_type="hmac", + hmac_format="hex", + ) + data = {"a": 1, "b": 2} + data_bytes = JSONRenderer().render(data) + hash_object = hmac.new( + secret.encode("utf-8"), msg=data_bytes, digestmod=hashlib.sha256 + ) + headers = { + signature_header_name: ( + f"{hmac_signature_prefix}" f"{hash_object.hexdigest()}" + ) + } + response = base_client.post( + f"{api_url_v1}/external_webhook/{webhook.uuid}/post/", + headers=headers, + data=data, + ) + assert response.status_code == status.HTTP_200_OK + + webhook.refresh_from_db() + test_data = yaml.safe_load(webhook.test_content) + assert test_data["a"] == 1 + assert test_data["b"] == 2 + assert test_data["eda_webhook_name"] == "test-webhook-1" + assert webhook.test_content_type == "application/json" + + +@pytest.mark.django_db +def test_create_webhook_bad_algorithm( + admin_client: APIClient, +): + data_in = { + "name": "test_webhook", + "header_key": "xyz", + "hmac_signature_prefix": "xyz=", + "hmac_algorithm": "nada", + } + response = admin_client.post(f"{api_url_v1}/webhooks/", data=data_in) + assert response.status_code == status.HTTP_400_BAD_REQUEST + result = response.data + assert "Invalid hash algorithm nada" in str(result["hmac_algorithm"][0]) + + +@pytest.mark.django_db +def test_create_webhook_bad_format( + admin_client: APIClient, +): + data_in = { + "name": "test_webhook", + "header_key": "xyz", + "hmac_signature_prefix": "xyz=", + "hmac_format": "nada", + } + response = admin_client.post(f"{api_url_v1}/webhooks/", data=data_in) + assert response.status_code == status.HTTP_400_BAD_REQUEST + result = response.data + assert "Invalid hash format nada" in str(result["hmac_format"][0]) + + +@pytest.mark.django_db +def test_delete_webhook( + admin_client: APIClient, + 
default_user: models.User, +): + webhook = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + owner=default_user, + url="http://www.example.com", + secret="secret", + ) + + response = admin_client.delete(f"{api_url_v1}/webhooks/{webhook.id}/") + assert response.status_code == status.HTTP_204_NO_CONTENT + + +@pytest.mark.django_db +def test_update_webhook( + admin_client: APIClient, + default_user: models.User, +): + webhook = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + owner=default_user, + url="http://www.example.com", + secret="secret", + jwks_url="https://www.example.com", + client_secret="secret", + ) + new_secret = "gobbledegook" + response = admin_client.patch( + f"{api_url_v1}/webhooks/{webhook.id}/", + data={"test_mode": True, "username": "", "secret": new_secret}, + ) + assert response.status_code == status.HTTP_200_OK + + webhook.refresh_from_db() + assert webhook.test_mode + assert webhook.secret.get_secret_value() == new_secret + assert webhook.client_secret.get_secret_value() == "secret" + assert webhook.jwks_url == "https://www.example.com" + + +@pytest.mark.django_db +def test_post_webhook_with_bad_uuid( + admin_client: APIClient, + default_user: models.User, +): + secret = secrets.token_hex(32) + signature_header_name = "my-secret-header" + hmac_signature_prefix = "sha256=" + models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + owner=default_user, + url="http://www.example.com", + secret=secret, + header_key=signature_header_name, + hmac_signature_prefix=hmac_signature_prefix, + ) + data = {"a": 1, "b": 2} + data_bytes = JSONRenderer().render(data) + hash_object = hmac.new( + secret.encode("utf-8"), msg=data_bytes, digestmod=hashlib.sha256 + ) + headers = { + signature_header_name: f"{hmac_signature_prefix}" + f"{hash_object.hexdigest()}" + } + response = admin_client.post( + f"{api_url_v1}/external_webhook/{str(uuid.uuid4())}/post/", + headers=headers, + data=data, + ) + assert response.status_code == status.HTTP_400_BAD_REQUEST + + +@pytest.mark.django_db +def test_post_webhook_with_form_urlencoded( + admin_client: APIClient, + default_user: models.User, +): + secret = secrets.token_hex(32) + signature_header_name = "my-secret-header" + hmac_signature_prefix = "sha256=" + webhook = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + owner=default_user, + url="http://www.example.com", + secret=secret, + header_key=signature_header_name, + hmac_signature_prefix=hmac_signature_prefix, + auth_type="hmac", + hmac_format="hex", + ) + data = {"a": 1, "b": 2} + content_type = "application/x-www-form-urlencoded" + data_bytes = urlencode(data).encode() + hash_object = hmac.new( + secret.encode("utf-8"), msg=data_bytes, digestmod=hashlib.sha256 + ) + headers = { + signature_header_name: ( + f"{hmac_signature_prefix}" f"{hash_object.hexdigest()}" + ), + "Content-Type": content_type, + } + response = admin_client.post( + f"{api_url_v1}/external_webhook/{webhook.uuid}/post/", + headers=headers, + data=data_bytes, + content_type=content_type, + ) + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.django_db +def test_post_webhook_with_base64_format( + admin_client: APIClient, + default_user: models.User, +): + secret = secrets.token_hex(32) + signature_header_name = "my-secret-header" + hmac_signature_prefix = "sha256=" + webhook = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + owner=default_user, + 
url="http://www.example.com", + secret=secret, + header_key=signature_header_name, + hmac_signature_prefix=hmac_signature_prefix, + hmac_format="base64", + ) + data = {"a": 1, "b": 2} + content_type = "application/x-www-form-urlencoded" + data_bytes = urlencode(data).encode() + hash_object = hmac.new( + secret.encode("utf-8"), msg=data_bytes, digestmod=hashlib.sha256 + ) + b64_signature = base64.b64encode(hash_object.digest()).decode() + headers = { + signature_header_name: (f"{hmac_signature_prefix}" f"{b64_signature}"), + "Content-Type": content_type, + } + response = admin_client.post( + f"{api_url_v1}/external_webhook/{webhook.uuid}/post/", + headers=headers, + data=data_bytes, + content_type=content_type, + ) + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.django_db +def test_post_webhook_with_basic_auth( + admin_client: APIClient, + default_user: models.User, +): + secret = secrets.token_hex(32) + username = "fred" + signature_header_name = "Authorization" + webhook = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + owner=default_user, + url="http://www.example.com", + secret=secret, + header_key=signature_header_name, + auth_type="basic", + username=username, + ) + user_pass = f"{username}:{secret}" + auth_value = f"Basic {base64.b64encode(user_pass.encode()).decode()}" + data = {"a": 1, "b": 2} + content_type = "application/x-www-form-urlencoded" + data_bytes = urlencode(data).encode() + headers = { + signature_header_name: auth_value, + "Content-Type": content_type, + } + response = admin_client.post( + f"{api_url_v1}/external_webhook/{webhook.uuid}/post/", + headers=headers, + data=data_bytes, + content_type=content_type, + ) + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.django_db +def test_post_webhook_with_token( + admin_client: APIClient, + default_user: models.User, +): + secret = secrets.token_hex(32) + signature_header_name = "my-secret-header" + webhook = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + owner=default_user, + url="http://www.example.com", + secret=secret, + header_key=signature_header_name, + auth_type="token", + ) + data = {"a": 1, "b": 2} + content_type = "application/x-www-form-urlencoded" + data_bytes = urlencode(data).encode() + headers = { + signature_header_name: secret, + "Content-Type": content_type, + } + response = admin_client.post( + f"{api_url_v1}/external_webhook/{webhook.uuid}/post/", + headers=headers, + data=data_bytes, + content_type=content_type, + ) + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.django_db +def test_post_webhook_with_test_mode_extra_headers( + admin_client: APIClient, + default_user: models.User, +): + secret = secrets.token_hex(32) + signature_header_name = "X-Gitlab-Token" + webhook = models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + owner=default_user, + url="http://www.example.com", + auth_type="token", + secret=secret, + header_key=signature_header_name, + test_mode=True, + additional_data_headers=[ + "X-Gitlab-Event", + "X-Gitlab-Event-UUID", + "X-Gitlab-Webhook-UUID", + ], + ) + data = {"a": 1, "b": 2} + headers = { + "X-Gitlab-Event-UUID": "c2675c66-7e6e-4fe2-9ac3-288534ef34b9", + "X-Gitlab-Instance": "https://gitlab.com", + signature_header_name: secret, + "X-Gitlab-Webhook-UUID": "b697868f-3b59-4a1f-985d-47f79e2b05ff", + "X-Gitlab-Event": "Push Hook", + } + + response = admin_client.post( + f"{api_url_v1}/external_webhook/{webhook.uuid}/post/", + 
headers=headers, + data=data, + ) + assert response.status_code == status.HTTP_200_OK + + webhook.refresh_from_db() + test_data = yaml.safe_load(webhook.test_content) + assert test_data["a"] == 1 + assert test_data["b"] == 2 + assert ( + test_data["X-Gitlab-Event-UUID"] + == "c2675c66-7e6e-4fe2-9ac3-288534ef34b9" + ) + assert ( + test_data["X-Gitlab-Webhook-UUID"] + == "b697868f-3b59-4a1f-985d-47f79e2b05ff" + ) + assert test_data["X-Gitlab-Event"] == "Push Hook" + assert test_data["eda_webhook_name"] == "test-webhook-1" + assert webhook.test_content_type == "application/json" + + +@pytest.mark.django_db +def test_create_webhook_update_subject( + admin_client: APIClient, +): + data_in = { + "name": "test_webhook", + "header_key": "Subject", + "certficate_subject": "Fred", + "auth_type": "mtls", + } + response = admin_client.post(f"{api_url_v1}/webhooks/", data=data_in) + assert response.status_code == status.HTTP_201_CREATED + response = admin_client.patch( + f"{api_url_v1}/webhooks/{response.data['id']}/", + data={"certificate_subject": "barney", "hmac_algorithm": ""}, + ) + assert response.status_code == status.HTTP_200_OK diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index f77d2cfe4..c38a8ea22 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -1052,3 +1052,43 @@ def _factory(logger): @pytest.fixture def container_engine_mock() -> MagicMock: return create_autospec(ContainerEngine, instance=True) + + +@pytest.fixture +def default_webhooks( + default_organization: models.Organization, default_user: models.User +) -> List[models.Webhook]: + return models.Webhook.objects.bulk_create( + [ + models.Webhook( + uuid=uuid.uuid4(), + name="test-webhook-1", + owner=default_user, + organization=default_organization, + url="http://www.example.com", + secret="secret", + ), + models.Webhook( + uuid=uuid.uuid4(), + name="test-webhook-2", + owner=default_user, + organization=default_organization, + url="http://www.example.com", + secret="secret", + ), + ] + ) + + +@pytest.fixture +def default_webhook( + default_organization: models.Organization, default_user: models.User +) -> models.Webhook: + return models.Webhook.objects.create( + uuid=uuid.uuid4(), + name="test-webhook-1", + owner=default_user, + organization=default_organization, + url="http://www.example.com", + secret="secret", + ) diff --git a/tools/docker/docker-compose-dev.yaml b/tools/docker/docker-compose-dev.yaml index 376282805..698733e0f 100644 --- a/tools/docker/docker-compose-dev.yaml +++ b/tools/docker/docker-compose-dev.yaml @@ -29,6 +29,12 @@ x-environment: &common-env EDA_ANSIBLE_RULEBOOK_FLUSH_AFTER: ${EDA_ANSIBLE_RULEBOOK_FLUSH_AFTER:-100} EDA_DEFAULT_QUEUE_TIMEOUT: ${EDA_DEFAULT_QUEUE_TIMEOUT:-300} EDA_DEFAULT_RULEBOOK_QUEUE_TIMEOUT: ${EDA_DEFAULT_RULEBOOK_QUEUE_TIMEOUT:-120} + EDA_SERVER_UUID: edgecafe-beef-feed-fade-decadeedgecafe + EDA_PG_NOTIFY_DSN: "host=host.containers.internal port=5432 dbname=eda user=postgres password=secret" + EDA_PG_NOTIFY_DSN_SERVER: "host=postgres port=5432 dbname=eda user=postgres password=secret" + EDA_WEBHOOK_URL_PREFIX: ${EDA_WEBHOOK_URL_PREFIX:-'https://my_nginix_server/edgecafe-beef-feed-fade-decadeedgecafe/api/eda/v1/external_webhook'} + EDA_WEBHOOK_HOST: ${EDA_WEBHOOK_HOST:-eda-webhook-api:8000} + EDA_WEBHOOK_SERVER: http://${EDA_WEBHOOK_HOST:-eda-webhook-api:8000} services: podman-pre-setup-node1: @@ -81,7 +87,7 @@ services: ports: - '${EDA_UI_PORT:-8443}:443' depends_on: - eda-api: + eda-webhook-api: condition: service_healthy 
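The EDA_PG_NOTIFY_DSN values added to these compose files point the activation side at the same channels PGNotify writes to; the ansible.eda.pg_listener source that activations are rewritten to use is the intended consumer. For local debugging, a standalone listener sketch that follows the chunking keys defined in src/aap_eda/services/pg_notify.py; the DSN and channel name are illustrative:

import json

import psycopg

DSN = "host=localhost port=5432 dbname=eda user=postgres password=secret"
CHANNEL = "eda_webhook_c2675c66_7e6e_4fe2_9ac3_288534ef34b9"

chunks = {}  # chunked message uuid -> list of chunk dicts

with psycopg.connect(conninfo=DSN, autocommit=True) as conn:
    conn.execute(f"LISTEN {CHANNEL};")
    for notification in conn.notifies():
        data = json.loads(notification.payload)
        if "_message_chunked_uuid" not in data:
            print("event:", data)
            continue
        parts = chunks.setdefault(data["_message_chunked_uuid"], [])
        parts.append(data)
        if len(parts) == data["_message_chunk_count"]:
            parts.sort(key=lambda chunk: chunk["_message_chunk_sequence"])
            payload = "".join(chunk["_chunk"] for chunk in parts)
            print("event:", json.loads(payload))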
eda-api: @@ -262,6 +268,27 @@ services: ports: - '${EDA_PROXY_PORT:-3128}:3128' + eda-webhook-api: + image: "${EDA_IMAGE:-quay.io/ansible/eda-server:main}" + environment: *common-env + command: + - /bin/bash + - -c + - >- + gunicorn -b 0.0.0.0:8000 + -w ${EDA_API_WORKERS:-4} aap_eda.wsgi + --access-logfile - + ports: + - '8555:8000' + depends_on: + eda-api: + condition: service_healthy + healthcheck: + test: [ 'CMD', 'curl', '-q', 'http://localhost:8000/_healthz' ] + interval: 30s + timeout: 5s + retries: 10 + volumes: postgres_data: {} podman_data_node1: {} diff --git a/tools/docker/docker-compose-mac.yml b/tools/docker/docker-compose-mac.yml index 14d46f9e7..552a13531 100644 --- a/tools/docker/docker-compose-mac.yml +++ b/tools/docker/docker-compose-mac.yml @@ -28,6 +28,13 @@ x-environment: - EDA_ANSIBLE_RULEBOOK_FLUSH_AFTER=${EDA_ANSIBLE_RULEBOOK_FLUSH_AFTER:-100} - EDA_DEFAULT_QUEUE_TIMEOUT=${EDA_DEFAULT_QUEUE_TIMEOUT:-300} - EDA_DEFAULT_RULEBOOK_QUEUE_TIMEOUT=${EDA_DEFAULT_RULEBOOK_QUEUE_TIMEOUT:-120} + - EDA_SERVER_UUID=edgecafe-beef-feed-fade-decadeedgecafe + - EDA_PG_NOTIFY_DSN="host=host.containers.internal port=5432 dbname=eda user=postgres password=secret" + - EDA_PG_NOTIFY_DSN_SERVER="host=postgres port=5432 dbname=eda user=postgres password=secret" + - EDA_WEBHOOK_URL_PREFIX=${EDA_WEBHOOK_URL_PREFIX:-https://my_nginix_server/edgecafe-beef-feed-fade-decadeedgecafe/api/eda/v1/external_webhook} + - EDA_WEBHOOK_MTLS_URL_PREFIX=${EDA_WEBHOOK_MTLS_URL_PREFIX:-https://my_nginix_server/mtls/edgecafe-beef-feed-fade-decadeedgecafe/api/eda/v1/external_webhook} + - EDA_WEBHOOK_HOST=${EDA_WEBHOOK_HOST:-eda-webhook-api:8000} + - EDA_WEBHOOK_SERVER=http://${EDA_WEBHOOK_HOST:-eda-webhook-api:8000} services: eda-ui: @@ -36,7 +43,7 @@ services: ports: - '${EDA_UI_PORT:-8443}:443' depends_on: - eda-api: + eda-webhook-api: condition: service_healthy eda-api: @@ -185,5 +192,26 @@ services: ports: - '${EDA_PROXY_PORT:-3128}:3128' + eda-webhook-api: + image: "${EDA_IMAGE:-quay.io/ansible/eda-server:main}" + environment: *common-env + security_opt: + - label=disable + command: + - /bin/bash + - -c + - aap-eda-manage runserver 0.0.0.0:8000 + ports: + - '8555:8000' + depends_on: + eda-api: + condition: service_healthy + healthcheck: + test: [ 'CMD', 'curl', '-q', 'http://0.0.0.0:8000/_healthz' ] + interval: 30s + timeout: 5s + retries: 10 + start_period: 15s + volumes: postgres_data: {} diff --git a/tools/docker/docker-compose-stage.yaml b/tools/docker/docker-compose-stage.yaml index cfb63e7e8..6c8761f37 100644 --- a/tools/docker/docker-compose-stage.yaml +++ b/tools/docker/docker-compose-stage.yaml @@ -25,6 +25,13 @@ x-environment: - EDA_ANSIBLE_BASE_JWT_VALIDATE_CERT=${EDA_ANSIBLE_BASE_JWT_VALIDATE_CERT:-False} - EDA_ANSIBLE_BASE_JWT_KEY=${EDA_ANSIBLE_BASE_JWT_KEY:-'https://localhost'} - EDA_DIRECT_SHARED_RESOURCE_MANAGEMENT_ENABLED=${EDA_DIRECT_SHARED_RESOURCE_MANAGEMENT_ENABLED:-False} + - EDA_SERVER_UUID=edgecafe-beef-feed-fade-decadeedgecafe + - EDA_PG_NOTIFY_DSN="host=host.containers.internal port=5432 dbname=eda user=postgres password=secret" + - EDA_PG_NOTIFY_DSN_SERVER="host=postgres port=5432 dbname=eda user=postgres password=secret" + - EDA_WEBHOOK_URL_PREFIX=${EDA_WEBHOOK_URL_PREFIX:-https://my_nginix_server/edgecafe-beef-feed-fade-decadeedgecafe/api/eda/v1/external_webhook} + - EDA_WEBHOOK_MTLS_URL_PREFIX=${EDA_WEBHOOK_MTLS_URL_PREFIX:-https://my_nginix_server/mtls/edgecafe-beef-feed-fade-decadeedgecafe/api/eda/v1/external_webhook} + - 
EDA_WEBHOOK_HOST=${EDA_WEBHOOK_HOST:-eda-webhook-api:8000} + - EDA_WEBHOOK_SERVER=http://${EDA_WEBHOOK_HOST:-eda-webhook-api:8000} services: podman-pre-setup: @@ -55,7 +62,7 @@ services: ports: - '8443:443' depends_on: - eda-api: + eda-webhook-api: condition: service_healthy eda-api: @@ -194,6 +201,27 @@ services: ports: - '${EDA_PROXY_PORT:-3128}:3128' + eda-webhook-api: + image: "${EDA_IMAGE:-quay.io/ansible/eda-server:main}" + environment: *common-env + command: + - /bin/bash + - -c + - >- + gunicorn -b 0.0.0.0:8000 + -w ${EDA_API_WORKERS:-4} aap_eda.wsgi + --access-logfile - + ports: + - '8555:8000' + depends_on: + eda-api: + condition: service_healthy + healthcheck: + test: [ 'CMD', 'curl', '-q', 'http://localhost:8000/_healthz' ] + interval: 30s + timeout: 5s + retries: 10 + volumes: postgres_data: {} podman_data: {}
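With the eda-webhook-api service above published on port 8555, an external sender can be simulated end to end the same way the HMAC tests do: compute a sha256 HMAC over the exact request body, prepend the configured signature prefix, and send it in the configured header. A sketch assuming the API is reachable directly at /api/eda/v1 as in the integration tests (behind the UI proxy the WEBHOOK_URL_PREFIX form applies instead); the UUID, secret, header name and prefix are illustrative and must match a webhook created via /api/eda/v1/webhooks/:

import hashlib
import hmac
import json

import requests

SECRET = "some secret value"
HEADER = "my-secret-header"
PREFIX = "sha256="
WEBHOOK_UUID = "c2675c66-7e6e-4fe2-9ac3-288534ef34b9"

body = json.dumps({"a": 1, "b": 2}).encode()
digest = hmac.new(SECRET.encode(), msg=body, digestmod=hashlib.sha256).hexdigest()

response = requests.post(
    f"http://localhost:8555/api/eda/v1/external_webhook/{WEBHOOK_UUID}/post/",
    data=body,
    headers={HEADER: f"{PREFIX}{digest}", "Content-Type": "application/json"},
)
print(response.status_code)  # 200 when the signature matches, 403 otherwise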